Clone of chromium aad1ce808763f59c7a3753e08f1500a104ecc6fd refs/remotes/origin/HEAD
diff --git a/tools/android/OWNERS b/tools/android/OWNERS
new file mode 100644
index 0000000..f8370f6
--- /dev/null
+++ b/tools/android/OWNERS
@@ -0,0 +1,4 @@
+digit@chromium.org
+michaelbai@chromium.org
+wangxianzhu@chromium.org
+yfriedman@chromium.org
diff --git a/tools/android/adb_reboot/adb_reboot.c b/tools/android/adb_reboot/adb_reboot.c
new file mode 100644
index 0000000..d414dd5
--- /dev/null
+++ b/tools/android/adb_reboot/adb_reboot.c
@@ -0,0 +1,43 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <time.h>
+#include <unistd.h>
+
+int main(int argc, char ** argv) {
+ int i = fork();
+ struct stat ft;
+ time_t ct;
+
+ if (i < 0) {
+ printf("fork error");
+ return 1;
+ }
+ if (i > 0)
+ return 0;
+
+ /* child (daemon) continues */
+ int j;
+ for (j = 0; j < sysconf(_SC_OPEN_MAX); j++)
+ close(j);
+
+ setsid(); /* obtain a new process group */
+
+ while (1) {
+ sleep(120);
+
+ stat("/sdcard/host_heartbeat", &ft);
+ time(&ct);
+ if (ct - ft.st_mtime > 120) {
+ /* File was not touched for some time. */
+ system("su -c reboot");
+ }
+ }
+
+ return 0;
+}
diff --git a/tools/android/adb_reboot/adb_reboot.gyp b/tools/android/adb_reboot/adb_reboot.gyp
new file mode 100644
index 0000000..85134b9
--- /dev/null
+++ b/tools/android/adb_reboot/adb_reboot.gyp
@@ -0,0 +1,14 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'adb_reboot',
+ 'type': 'executable',
+ 'sources': [
+ 'adb_reboot.c',
+ ],
+ },
+ ],
+}
diff --git a/tools/android/adb_remote_setup.sh b/tools/android/adb_remote_setup.sh
new file mode 100755
index 0000000..87c6601
--- /dev/null
+++ b/tools/android/adb_remote_setup.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# URL from which the latest version of this script can be downloaded.
+script_url="http://src.chromium.org/svn/trunk/src/tools/android/adb_remote_setup.sh"
+
+# Replaces this file with the latest version of the script and runs it.
+update-self() {
+ local script="${BASH_SOURCE[0]}"
+ local new_script="${script}.new"
+ local updater_script="${script}.updater"
+ curl -sSf -o "$new_script" "$script_url" || return
+ chmod +x "$new_script" || return
+
+ # Replace this file with the newly downloaded script.
+ cat > "$updater_script" << EOF
+#!/bin/bash
+if mv "$new_script" "$script"; then
+ rm -- "$updater_script"
+else
+ echo "Note: script update failed."
+fi
+ADB_REMOTE_SETUP_NO_UPDATE=1 exec /bin/bash "$script" $@
+EOF
+ exec /bin/bash "$updater_script" "$@"
+}
+
+if [[ "$ADB_REMOTE_SETUP_NO_UPDATE" -ne 1 ]]; then
+ update-self "$@" || echo 'Note: script update failed'
+fi
+
+if [[ $# -ne 1 && $# -ne 2 ]]; then
+ cat <<'EOF'
+Usage: adb_remote_setup.sh REMOTE_HOST [REMOTE_ADB]
+
+Configures adb on a remote machine to communicate with a device attached to the
+local machine. This is useful for installing APKs, running tests, etc. while
+working remotely.
+
+Arguments:
+ REMOTE_HOST hostname of remote machine
+ REMOTE_ADB path to adb on the remote machine (you can omit this if adb is in
+ the remote host's path)
+EOF
+ exit 1
+fi
+
+remote_host="$1"
+remote_adb="${2:-adb}"
+
+# Ensure adb is in the local machine's path.
+if ! which adb >/dev/null; then
+ echo "error: adb must be in your local machine's path."
+ exit 1
+fi
+
+if which kinit >/dev/null; then
+ # Allow ssh to succeed without typing your password multiple times.
+ kinit -R || kinit
+fi
+
+# Ensure local and remote versions of adb are the same.
+remote_adb_version=$(ssh "$remote_host" "$remote_adb version")
+local_adb_version=$(adb version)
+if [[ "$local_adb_version" != "$remote_adb_version" ]]; then
+ echo >&2
+ echo "WARNING: local adb is not the same version as remote adb." >&2
+ echo "This should be fixed since it may result in protocol errors." >&2
+ echo " local adb: $local_adb_version" >&2
+ echo " remote adb: $remote_adb_version" >&2
+ echo >&2
+ sleep 5
+fi
+
+# Kill the adb server on the remote host.
+ssh "$remote_host" "$remote_adb kill-server"
+
+# Start the adb server locally.
+adb start-server
+
+# Forward various ports from the remote host to the local host:
+# 5037: adb
+# 8001: http server
+# 9031: sync server
+# 10000: net unittests
+# 10201: net unittests
+ssh -C \
+ -R 5037:localhost:5037 \
+ -L 8001:localhost:8001 \
+ -L 9031:localhost:9031 \
+ -R 10000:localhost:10000 \
+ -R 10201:localhost:10201 \
+ "$remote_host"
diff --git a/tools/android/android_tools.gyp b/tools/android/android_tools.gyp
new file mode 100644
index 0000000..84de85c
--- /dev/null
+++ b/tools/android/android_tools.gyp
@@ -0,0 +1,52 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ # Intermediate target grouping the android tools needed to run native
+ # unittests and instrumentation test apks.
+ {
+ 'target_name': 'android_tools',
+ 'type': 'none',
+ 'dependencies': [
+ 'adb_reboot/adb_reboot.gyp:adb_reboot',
+ 'file_poller/file_poller.gyp:file_poller',
+ 'forwarder2/forwarder.gyp:forwarder2',
+ 'md5sum/md5sum.gyp:md5sum',
+ 'purge_ashmem/purge_ashmem.gyp:purge_ashmem',
+ 'run_pie/run_pie.gyp:run_pie',
+ '../../tools/telemetry/telemetry.gyp:*#host',
+ ],
+ },
+ {
+ 'target_name': 'heap_profiler',
+ 'type': 'none',
+ 'dependencies': [
+ 'heap_profiler/heap_profiler.gyp:heap_dump',
+ 'heap_profiler/heap_profiler.gyp:heap_profiler',
+ ],
+ },
+ {
+ 'target_name': 'memdump',
+ 'type': 'none',
+ 'dependencies': [
+ 'memdump/memdump.gyp:memdump',
+ ],
+ },
+ {
+ 'target_name': 'memconsumer',
+ 'type': 'none',
+ 'dependencies': [
+ 'memconsumer/memconsumer.gyp:memconsumer',
+ ],
+ },
+ {
+ 'target_name': 'ps_ext',
+ 'type': 'none',
+ 'dependencies': [
+ 'ps_ext/ps_ext.gyp:ps_ext',
+ ],
+ },
+ ],
+}
diff --git a/tools/android/asan/asan_device_setup.sh b/tools/android/asan/asan_device_setup.sh
new file mode 100755
index 0000000..5948f2b
--- /dev/null
+++ b/tools/android/asan/asan_device_setup.sh
@@ -0,0 +1,194 @@
+#!/bin/bash -e
+#===- lib/asan/scripts/asan_device_setup.sh -----------------------------------===#
+#
+# The LLVM Compiler Infrastructure
+#
+# This file is distributed under the University of Illinois Open Source
+# License. See LICENSE.TXT for details.
+#
+# Prepare Android device to run ASan applications.
+#
+#===------------------------------------------------------------------------===#
+
+
+HERE="$(cd "$(dirname "$0")" && pwd)"
+
+revert=no
+extra_options=
+device=
+lib=
+
+function usage {
+ echo "usage: $0 [--revert] [--device device-id] [--lib path] [--extra_options options]"
+ echo " --revert: Uninstall ASan from the device."
+ echo " --lib: Path to ASan runtime library."
+ echo " --extra_options: Extra ASAN_OPTIONS."
+ echo " --device: Install to the given device. Use 'adb devices' to find"
+ echo " device-id."
+ echo
+ exit 1
+}
+
+while [[ $# -gt 0 ]]; do
+ case $1 in
+ --revert)
+ revert=yes
+ ;;
+ --extra-options)
+ shift
+ if [[ $# == 0 ]]; then
+ echo "--extra-options requires an argument."
+ exit 1
+ fi
+ extra_options="$1"
+ ;;
+ --lib)
+ shift
+ if [[ $# == 0 ]]; then
+ echo "--lib requires an argument."
+ exit 1
+ fi
+ lib="$1"
+ ;;
+ --device)
+ shift
+ if [[ $# == 0 ]]; then
+ echo "--device requires an argument."
+ exit 1
+ fi
+ device="$1"
+ ;;
+ *)
+ usage
+ ;;
+ esac
+ shift
+done
+
+ADB=${ADB:-adb}
+if [[ x$device != x ]]; then
+ ADB="$ADB -s $device"
+fi
+
+ASAN_RT="libclang_rt.asan-arm-android.so"
+
+if [[ x$revert == xyes ]]; then
+ echo '>> Uninstalling ASan'
+ $ADB root
+ $ADB wait-for-device
+ $ADB remount
+ $ADB shell mv /system/bin/app_process.real /system/bin/app_process
+ $ADB shell rm /system/bin/asanwrapper
+ $ADB shell rm /system/lib/$ASAN_RT
+
+ echo '>> Restarting shell'
+ $ADB shell stop
+ $ADB shell start
+
+ echo '>> Done'
+ exit 0
+fi
+
+if [[ -d "$lib" ]]; then
+ ASAN_RT_PATH="$lib"
+elif [[ -f "$lib" && "$lib" == *"$ASAN_RT" ]]; then
+ ASAN_RT_PATH=$(dirname "$lib")
+elif [[ -f "$HERE/$ASAN_RT" ]]; then
+ ASAN_RT_PATH="$HERE"
+elif [[ $(basename "$HERE") == "bin" ]]; then
+ # We could be in the toolchain's base directory.
+ # Consider ../lib and ../lib/clang/$VERSION/lib/linux.
+ P=$(ls "$HERE"/../lib/"$ASAN_RT" "$HERE"/../lib/clang/*/lib/linux/"$ASAN_RT" 2>/dev/null | sort | tail -1)
+ if [[ -n "$P" ]]; then
+ ASAN_RT_PATH="$(dirname "$P")"
+ fi
+fi
+
+if [[ -z "$ASAN_RT_PATH" || ! -f "$ASAN_RT_PATH/$ASAN_RT" ]]; then
+ echo "ASan runtime library not found"
+ exit 1
+fi
+
+TMPDIRBASE=$(mktemp -d)
+TMPDIROLD="$TMPDIRBASE/old"
+TMPDIR="$TMPDIRBASE/new"
+mkdir "$TMPDIROLD"
+
+echo '>> Remounting /system rw'
+$ADB root
+$ADB wait-for-device
+$ADB remount
+
+echo '>> Copying files from the device'
+$ADB pull /system/bin/app_process "$TMPDIROLD"
+$ADB pull /system/bin/app_process.real "$TMPDIROLD" || true
+$ADB pull /system/bin/asanwrapper "$TMPDIROLD" || true
+$ADB pull /system/lib/libclang_rt.asan-arm-android.so "$TMPDIROLD" || true
+cp -r "$TMPDIROLD" "$TMPDIR"
+
+if ! [[ -f "$TMPDIR/app_process" ]]; then
+ echo "app_process missing???"
+ exit 1
+fi
+
+if [[ -f "$TMPDIR/app_process.real" ]]; then
+ echo "app_process.real exists, updating the wrapper"
+else
+ echo "app_process.real missing, new installation"
+ mv "$TMPDIR/app_process" "$TMPDIR/app_process.real"
+fi
+
+echo '>> Generating wrappers'
+
+cp "$ASAN_RT_PATH/$ASAN_RT" "$TMPDIR/"
+
+# FIXME: alloc_dealloc_mismatch=0 prevents a failure in libdvm startup,
+# which may or may not be a real bug (probably not).
+ASAN_OPTIONS=start_deactivated=1,alloc_dealloc_mismatch=0
+if [[ x$extra_options != x ]] ; then
+ ASAN_OPTIONS="$ASAN_OPTIONS,$extra_options"
+fi
+
+# Zygote wrapper.
+cat <<EOF >"$TMPDIR/app_process"
+#!/system/bin/sh
+ASAN_OPTIONS=$ASAN_OPTIONS \\
+LD_PRELOAD=libclang_rt.asan-arm-android.so \\
+exec /system/bin/app_process.real \$@
+
+EOF
+
+# General command-line tool wrapper (use for anything that's not started as
+# zygote).
+cat <<EOF >"$TMPDIR/asanwrapper"
+#!/system/bin/sh
+LD_PRELOAD=libclang_rt.asan-arm-android.so \\
+exec \$@
+
+EOF
+
+if ! ( cd "$TMPDIRBASE" && diff -qr old/ new/ ) ; then
+ echo '>> Pushing files to the device'
+ $ADB push "$TMPDIR/$ASAN_RT" /system/lib/
+ $ADB push "$TMPDIR/app_process" /system/bin/app_process
+ $ADB push "$TMPDIR/app_process.real" /system/bin/app_process.real
+ $ADB push "$TMPDIR/asanwrapper" /system/bin/asanwrapper
+ $ADB shell chown root.shell \
+ /system/bin/app_process \
+ /system/bin/app_process.real \
+ /system/bin/asanwrapper
+ $ADB shell chmod 755 \
+ /system/bin/app_process \
+ /system/bin/app_process.real \
+ /system/bin/asanwrapper
+
+ echo '>> Restarting shell (asynchronous)'
+ $ADB shell stop
+ $ADB shell start
+
+ echo '>> Please wait until the device restarts'
+else
+ echo '>> Device is up to date'
+fi
+
+rm -r "$TMPDIRBASE"
diff --git a/tools/android/checkstyle/checkstyle.py b/tools/android/checkstyle/checkstyle.py
new file mode 100644
index 0000000..25f202f
--- /dev/null
+++ b/tools/android/checkstyle/checkstyle.py
@@ -0,0 +1,69 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script that is used by PRESUBMIT.py to run style checks on Java files."""
+
+import os
+import subprocess
+
+
+CHROMIUM_SRC = os.path.normpath(
+ os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir))
+CHECKSTYLE_ROOT = os.path.join(CHROMIUM_SRC, 'third_party', 'checkstyle',
+ 'checkstyle-5.7-all.jar')
+
+
+def RunCheckstyle(input_api, output_api, style_file):
+ if not os.path.exists(style_file):
+ file_error = (' Java checkstyle configuration file is missing: '
+ + style_file)
+ return [output_api.PresubmitError(file_error)]
+
+ # Filter out non-Java files and files that were deleted.
+ java_files = [x.LocalPath() for x in input_api.AffectedFiles(False, False)
+ if os.path.splitext(x.LocalPath())[1] == '.java']
+ if not java_files:
+ return []
+
+ # Run checkstyle
+ checkstyle_env = os.environ.copy()
+ checkstyle_env['JAVA_CMD'] = 'java'
+ try:
+ check = subprocess.Popen(['java', '-cp',
+ CHECKSTYLE_ROOT,
+ 'com.puppycrawl.tools.checkstyle.Main', '-c',
+ style_file] + java_files,
+ stdout=subprocess.PIPE, env=checkstyle_env)
+ stdout, _ = check.communicate()
+ if check.returncode == 0:
+ return []
+ except OSError as e:
+ import errno
+ if e.errno == errno.ENOENT:
+ install_error = (' checkstyle is not installed. Please run '
+ 'build/install-build-deps-android.sh')
+      return [output_api.PresubmitPromptWarning(install_error)]
+    # Re-raise unexpected OSErrors; otherwise stdout would be undefined below.
+    raise
+
+ # Remove non-error values from stdout
+ errors = stdout.splitlines()
+
+ if errors and errors[0] == 'Starting audit...':
+ del errors[0]
+ if errors and errors[-1] == 'Audit done.':
+ del errors[-1]
+
+ # Filter out warnings
+ errors = [x for x in errors if 'warning: ' not in x]
+ if not errors:
+ return []
+
+ local_path = input_api.PresubmitLocalPath()
+ output = []
+ for error in errors:
+ # Change the full file path to relative path in the output lines
+ full_path, end = error.split(':', 1)
+ rel_path = os.path.relpath(full_path, local_path)
+ output.append(' %s:%s' % (rel_path, end))
+ return [output_api.PresubmitPromptWarning('\n'.join(output))]
diff --git a/tools/android/checkstyle/chromium-style-5.0.xml b/tools/android/checkstyle/chromium-style-5.0.xml
new file mode 100644
index 0000000..bc40f87
--- /dev/null
+++ b/tools/android/checkstyle/chromium-style-5.0.xml
@@ -0,0 +1,183 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN" "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
+
+<!--
+ See installation instructions: https://sites.google.com/a/chromium.org/dev/checkstyle
+-->
+<module name="Checker">
+ <property name="severity" value="warning"/>
+ <property name="charset" value="UTF-8"/>
+ <module name="TreeWalker">
+ <module name="AvoidStarImport">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="IllegalCatch">
+ <property name="severity" value="warning"/>
+ </module>
+ <module name="RedundantImport">
+ <message key="import.redundant" value="Redundant import: {0}. Use :JavaImportOrganize (ECLIM) or Ctrl+Shift+O (Eclipse) to sort imports"/>
+ <property name="severity" value="error"/>
+ </module>
+ <module name="UnusedImports">
+ <property name="severity" value="error"/>
+ <property name="processJavadoc" value="true"/>
+ <message key="import.unused" value="Unused import: {0}. Use :JavaImportOrganize (ECLIM) or Ctrl+Shift+O (Eclipse) to sort imports"/>
+ </module>
+ <module name="JavadocType">
+ <property name="severity" value="error"/>
+ <property name="tokens" value="INTERFACE_DEF, CLASS_DEF"/>
+ <property name="scope" value="public"/>
+ <message key="javadoc.missing" value="Public classes and interfaces require JavaDoc comments."/>
+ </module>
+ <module name="JavadocMethod">
+ <property name="severity" value="warning"/>
+ <property name="scope" value="public"/>
+ <property name="allowMissingParamTags" value="true"/>
+ <property name="allowMissingPropertyJavadoc" value="true"/>
+ <property name="allowMissingReturnTag" value="true"/>
+ <property name="allowMissingThrowsTags" value="true"/>
+ </module>
+ <module name="PackageName">
+ <property name="severity" value="error"/>
+ <property name="format" value="^[a-z]+(\.[a-z][a-z0-9_]{1,})*$"/>
+ </module>
+ <module name="SimplifyBooleanExpression">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="SimplifyBooleanReturn">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="TypeName">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="ConstantName">
+ <property name="severity" value="error"/>
+ <property name="format" value="^([A-Z][A-Z0-9]*(_[A-Z0-9]+)*)|(s[A-Z][a-zA-Z0-9]*)$"/>
+ <message key="name.invalidPattern" value="Static final field names must either be all caps (e.g. int HEIGHT_PX) for 'true' constants, or start with s (e.g. AtomicInteger sNextId or Runnable sSuspendTask) for fields with mutable state or that don't 'feel' like constants."/>
+ </module>
+ <!-- Non-public, non-static field names start with m. -->
+ <module name="MemberName">
+ <property name="severity" value="error"/>
+ <property name="format" value="^m[A-Z][a-zA-Z0-9]*$"/>
+ <property name="applyToPublic" value="false"/>
+ <message key="name.invalidPattern" value="Non-public, non-static field names start with m."/>
+ </module>
+ <!-- Static field names start with s. -->
+ <module name="StaticVariableName">
+ <property name="severity" value="error"/>
+ <property name="format" value="^s[A-Z][a-zA-Z0-9]*$"/>
+ <property name="applyToPublic" value="false"/>
+ <message key="name.invalidPattern" value="Static field names start with s."/>
+ </module>
+ <module name="MethodName">
+ <property name="severity" value="error"/>
+ <property name="format" value="^[a-z][a-zA-Z0-9_]*$"/>
+ <message key="name.invalidPattern" value="Method names should start with a lower case letter (e.g. getWidth())"/>
+ </module>
+ <module name="ParameterName">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="LocalFinalVariableName">
+ <property name="severity" value="error"/>
+ <property name="format" value="^m|s|((([ms][a-z0-9])|([a-ln-rt-z]))[a-zA-Z0-9]*)$"/>
+ <message key="name.invalidPattern" value="Local variables should be camel-cased (e.g. int minWidth = 4)."/>
+ </module>
+ <module name="LocalVariableName">
+ <property name="severity" value="error"/>
+ <property name="format" value="^m|s|((([ms][a-z0-9])|([a-ln-rt-z]))[a-zA-Z0-9]*)$"/>
+ <message key="name.invalidPattern" value="Local variables should be camel-cased (e.g. int minWidth = 4)."/>
+ </module>
+ <module name="LineLength">
+ <property name="severity" value="error"/>
+ <property name="ignorePattern" value="^import.*$" />
+ <property name="max" value="100"/>
+ </module>
+ <module name="LeftCurly">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="RightCurly">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="NeedBraces">
+ <property name="severity" value="warning"/>
+ <property name="tokens" value="LITERAL_FOR, LITERAL_WHILE, LITERAL_DO"/>
+ </module>
+ <module name="EmptyBlock">
+ <property name="severity" value="error"/>
+ <property name="option" value="text"/>
+ <metadata name="altname" value="EmptyCatchBlock"/>
+ </module>
+ <module name="UpperEll">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="FallThrough">
+ <property name="severity" value="error"/>
+ <property name="reliefPattern" value=".*"/>
+ </module>
+ <module name="ModifierOrder">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="WhitespaceAround">
+ <property name="severity" value="error"/>
+ <property name="tokens" value="ASSIGN, BAND, BAND_ASSIGN, BOR, BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN, EQUAL, GE, GT, LAND, LE, LITERAL_ASSERT, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN, LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS, MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION, SL, SLIST, SL_ASSIGN, SR, SR_ASSIGN, STAR, STAR_ASSIGN, TYPE_EXTENSION_AND" />
+ <property name="allowEmptyConstructors" value="true"/>
+ <property name="allowEmptyMethods" value="true"/>
+ </module>
+ <module name="WhitespaceAfter">
+ <property name="severity" value="error"/>
+ <property name="tokens" value="COMMA, SEMI, TYPECAST"/>
+ </module>
+ <module name="NoWhitespaceAfter">
+ <property name="severity" value="error"/>
+ <property name="tokens" value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS, UNARY_PLUS"/>
+ </module>
+ <module name="NoWhitespaceBefore">
+ <property name="severity" value="error"/>
+ <property name="allowLineBreaks" value="true"/>
+ <property name="tokens" value="SEMI, DOT, POST_DEC, POST_INC"/>
+ </module>
+ <module name="EmptyStatement">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="NoFinalizer">
+ <property name="severity" value="warning"/>
+ </module>
+ <module name="ParenPad">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="ImportOrder">
+ <property name="severity" value="error"/>
+ <message key="import.ordering" value="Wrong order for {0} import. Use :JavaImportOrganize (ECLIM) or Ctrl+Shift+O (Eclipse) to sort imports"/>
+ <property name="groups" value="android, com, dalvik, gov, junit, libcore, net, org, java, javax"/>
+ <property name="ordered" value="true"/>
+ <property name="option" value="top"/>
+ <property name="separated" value="true"/>
+ </module>
+ <!-- TODO(aurimas): make indentation an error once https://github.com/checkstyle/checkstyle/issues/255 is fixed. -->
+ <module name="Indentation">
+ <property name="severity" value="warning"/>
+ <property name="basicOffset" value="4"/>
+ <property name="throwsIndent" value="8"/>
+ </module>
+ </module>
+ <module name="FileTabCharacter">
+ <property name="severity" value="error"/>
+ </module>
+ <module name="RegexpSingleline">
+ <property name="format" value="((//.*)|(\*.*))FIXME"/>
+ <property name="message" value="TODO is preferred to FIXME. e.g. "TODO(johndoe):"/>
+ </module>
+ <module name="RegexpSingleline">
+ <property name="format" value="((//.*)|(\*.*))(?<!TODO\(.{0,100})(TODO[^(])|(TODO\([^)]*$)"/>
+ <property name="message" value="All TODOs should be named. e.g. "TODO(johndoe):"/>
+ </module>
+ <module name="RegexpSingleline">
+ <property name="severity" value="error"/>
+ <property name="format" value="[ \t]+$"/>
+ <property name="message" value="Trailing whitespace"/>
+ </module>
+ <module name="RegexpHeader">
+ <property name="severity" value="error"/>
+ <property name="header" value="^// Copyright 20\d\d The Chromium Authors. All rights reserved.$\n^// Use of this source code is governed by a BSD-style license that can be$\n^// found in the LICENSE file.$"/>
+ </module>
+</module>
diff --git a/tools/android/common/adb_connection.cc b/tools/android/common/adb_connection.cc
new file mode 100644
index 0000000..9985a3a
--- /dev/null
+++ b/tools/android/common/adb_connection.cc
@@ -0,0 +1,107 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/common/adb_connection.h"
+
+#include <arpa/inet.h>
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/socket.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "base/logging.h"
+#include "base/posix/eintr_wrapper.h"
+#include "tools/android/common/net.h"
+
+namespace tools {
+namespace {
+
+void CloseSocket(int fd) {
+ if (fd >= 0) {
+ int old_errno = errno;
+ close(fd);
+ errno = old_errno;
+ }
+}
+
+} // namespace
+
+int ConnectAdbHostSocket(const char* forward_to) {
+ // ADB port forward request format: HHHHtcp:port:address.
+  // HHHH is the hexadecimal length of the "tcp:port:address" part.
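+  // For example, forward_to "10000:127.0.0.1" (an illustrative value) produces
+  // the request "0013tcp:10000:127.0.0.1", since "tcp:10000:127.0.0.1" is
+  // 19 (0x13) characters long.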
+ const size_t kBufferMaxLength = 30;
+ const size_t kLengthOfLength = 4;
+ const size_t kAddressMaxLength = kBufferMaxLength - kLengthOfLength;
+
+ const char kAddressPrefix[] = { 't', 'c', 'p', ':' };
+ size_t address_length = arraysize(kAddressPrefix) + strlen(forward_to);
+ if (address_length > kBufferMaxLength - kLengthOfLength) {
+ LOG(ERROR) << "Forward to address is too long: " << forward_to;
+ return -1;
+ }
+
+ char request[kBufferMaxLength];
+ memcpy(request + kLengthOfLength, kAddressPrefix, arraysize(kAddressPrefix));
+ memcpy(request + kLengthOfLength + arraysize(kAddressPrefix),
+ forward_to, strlen(forward_to));
+
+ char length_buffer[kLengthOfLength + 1];
+ snprintf(length_buffer, arraysize(length_buffer), "%04X",
+ static_cast<int>(address_length));
+ memcpy(request, length_buffer, kLengthOfLength);
+
+ int host_socket = socket(AF_INET, SOCK_STREAM, 0);
+ if (host_socket < 0) {
+ LOG(ERROR) << "Failed to create adb socket: " << strerror(errno);
+ return -1;
+ }
+
+ DisableNagle(host_socket);
+
+ const int kAdbPort = 5037;
+ sockaddr_in addr;
+ memset(&addr, 0, sizeof(addr));
+ addr.sin_family = AF_INET;
+ addr.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
+ addr.sin_port = htons(kAdbPort);
+ if (HANDLE_EINTR(connect(host_socket, reinterpret_cast<sockaddr*>(&addr),
+ sizeof(addr))) < 0) {
+ LOG(ERROR) << "Failed to connect adb socket: " << strerror(errno);
+ CloseSocket(host_socket);
+ return -1;
+ }
+
+ size_t bytes_remaining = address_length + kLengthOfLength;
+ size_t bytes_sent = 0;
+ while (bytes_remaining > 0) {
+ int ret = HANDLE_EINTR(send(host_socket, request + bytes_sent,
+ bytes_remaining, 0));
+ if (ret < 0) {
+ LOG(ERROR) << "Failed to send request: " << strerror(errno);
+ CloseSocket(host_socket);
+ return -1;
+ }
+
+ bytes_sent += ret;
+ bytes_remaining -= ret;
+ }
+
+ const size_t kAdbStatusLength = 4;
+ char response[kBufferMaxLength];
+ int response_length = HANDLE_EINTR(recv(host_socket, response,
+ kBufferMaxLength, 0));
+  if (response_length < static_cast<int>(kAdbStatusLength) ||
+ strncmp("OKAY", response, kAdbStatusLength) != 0) {
+ LOG(ERROR) << "Bad response from ADB: length: " << response_length
+ << " data: " << DumpBinary(response, response_length);
+ CloseSocket(host_socket);
+ return -1;
+ }
+
+ return host_socket;
+}
+
+} // namespace tools
diff --git a/tools/android/common/adb_connection.h b/tools/android/common/adb_connection.h
new file mode 100644
index 0000000..3fa0fb3
--- /dev/null
+++ b/tools/android/common/adb_connection.h
@@ -0,0 +1,18 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_COMMON_ADB_CONNECTION_H_
+#define TOOLS_ANDROID_COMMON_ADB_CONNECTION_H_
+
+namespace tools {
+
+// Creates a socket that can forward to a host socket through ADB.
+// The format of forward_to is <port>:<ip_address>.
+// Returns the socket handle, or -1 on any error.
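+//
+// Example usage (sketch; the port and address are illustrative):
+//   int fd = tools::ConnectAdbHostSocket("10000:127.0.0.1");
+//   if (fd >= 0) {
+//     // Use fd as a regular connected socket, then close() it when done.
+//   }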
+int ConnectAdbHostSocket(const char* forward_to);
+
+} // namespace tools
+
+#endif // TOOLS_ANDROID_COMMON_ADB_CONNECTION_H_
+
diff --git a/tools/android/common/common.gyp b/tools/android/common/common.gyp
new file mode 100644
index 0000000..8622625
--- /dev/null
+++ b/tools/android/common/common.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'android_tools_common',
+ 'type': 'static_library',
+ 'toolsets': ['host', 'target'],
+ 'include_dirs': [
+ '..',
+ '../../..',
+ ],
+ 'sources': [
+ 'adb_connection.cc',
+ 'adb_connection.h',
+ 'daemon.cc',
+ 'daemon.h',
+ 'net.cc',
+ 'net.h',
+ ],
+ },
+ ],
+}
+
diff --git a/tools/android/common/daemon.cc b/tools/android/common/daemon.cc
new file mode 100644
index 0000000..699c615
--- /dev/null
+++ b/tools/android/common/daemon.cc
@@ -0,0 +1,75 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/common/daemon.h"
+
+#include <errno.h>
+#include <signal.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "base/command_line.h"
+#include "base/logging.h"
+
+namespace {
+
+const char kNoSpawnDaemon[] = "D";
+
+int g_exit_status = 0;
+
+void Exit(int unused) {
+ _exit(g_exit_status);
+}
+
+void CloseFileDescriptor(int fd) {
+ int old_errno = errno;
+ close(fd);
+ errno = old_errno;
+}
+
+} // namespace
+
+namespace tools {
+
+bool HasHelpSwitch(const CommandLine& command_line) {
+ return command_line.HasSwitch("h") || command_line.HasSwitch("help");
+}
+
+bool HasNoSpawnDaemonSwitch(const CommandLine& command_line) {
+ return command_line.HasSwitch(kNoSpawnDaemon);
+}
+
+void ShowHelp(const char* program,
+ const char* extra_title,
+ const char* extra_descriptions) {
+ printf("Usage: %s [-%s] %s\n"
+ " -%s stops from spawning a daemon process\n%s",
+ program, kNoSpawnDaemon, extra_title, kNoSpawnDaemon,
+ extra_descriptions);
+}
+
+void SpawnDaemon(int exit_status) {
+ g_exit_status = exit_status;
+ signal(SIGUSR1, Exit);
+
+ if (fork()) {
+ // In parent process.
+ sleep(10); // Wait for the child process to finish setsid().
+ NOTREACHED();
+ }
+
+ // In child process.
+ setsid(); // Detach the child process from its parent.
+ kill(getppid(), SIGUSR1); // Inform the parent process to exit.
+
+ // Close the standard input and outputs, otherwise the process may block
+ // adbd when the shell exits.
+ // Comment out these lines if you want to see outputs for debugging.
+ CloseFileDescriptor(0);
+ CloseFileDescriptor(1);
+ CloseFileDescriptor(2);
+}
+
+} // namespace tools
diff --git a/tools/android/common/daemon.h b/tools/android/common/daemon.h
new file mode 100644
index 0000000..99faf72
--- /dev/null
+++ b/tools/android/common/daemon.h
@@ -0,0 +1,30 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_COMMON_DAEMON_H_
+#define TOOLS_ANDROID_COMMON_DAEMON_H_
+
+namespace base {
+class CommandLine;
+}
+
+namespace tools {
+
+bool HasHelpSwitch(const base::CommandLine& command_line);
+
+bool HasNoSpawnDaemonSwitch(const base::CommandLine& command_line);
+
+void ShowHelp(const char* program,
+ const char* extra_title,
+ const char* extra_descriptions);
+
+// Spawns a daemon process and exits the current process with exit_status.
+// Any code executed after this function returns will be executed in the
+// spawned daemon process.
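+//
+// Typical usage (sketch; 'command_line' stands for the caller's
+// base::CommandLine instance):
+//   if (!tools::HasNoSpawnDaemonSwitch(command_line))
+//     tools::SpawnDaemon(0);
+//   // From here on, the code runs in the spawned daemon process.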
+void SpawnDaemon(int exit_status);
+
+} // namespace tools
+
+#endif // TOOLS_ANDROID_COMMON_DAEMON_H_
+
diff --git a/tools/android/common/net.cc b/tools/android/common/net.cc
new file mode 100644
index 0000000..3b9ef15
--- /dev/null
+++ b/tools/android/common/net.cc
@@ -0,0 +1,40 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/common/net.h"
+
+#include <netinet/in.h>
+#include <netinet/tcp.h>
+#include <sys/socket.h>
+#include <sys/types.h>
+
+#include "base/strings/stringprintf.h"
+
+namespace tools {
+
+int DisableNagle(int socket) {
+ int on = 1;
+ return setsockopt(socket, IPPROTO_TCP, TCP_NODELAY, &on, sizeof(on));
+}
+
+int DeferAccept(int socket) {
+ int on = 1;
+ return setsockopt(socket, IPPROTO_TCP, TCP_DEFER_ACCEPT, &on, sizeof(on));
+}
+
+std::string DumpBinary(const char* buffer, size_t length) {
+ std::string result = "[";
+  for (size_t i = 0; i < length; ++i) {
+ base::StringAppendF(&result, "%02x,",
+ static_cast<unsigned char>(buffer[i]));
+ }
+
+ if (length)
+ result.erase(result.length() - 1);
+
+ return result + "]";
+}
+
+} // namespace tools
+
diff --git a/tools/android/common/net.h b/tools/android/common/net.h
new file mode 100644
index 0000000..e361954
--- /dev/null
+++ b/tools/android/common/net.h
@@ -0,0 +1,25 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_COMMON_NET_H_
+#define TOOLS_ANDROID_COMMON_NET_H_
+
+#include <string>
+
+namespace tools {
+
+// Disabling Nagle's algorithm can improve TCP transmission performance. Both
+// the Chrome network stack and the adb tool use it.
+int DisableNagle(int socket);
+
+// Wakes up the listener only when data arrives.
+int DeferAccept(int socket);
+
+// Dumps a binary buffer into a string in a human-readable format.
+std::string DumpBinary(const char* buffer, size_t length);
+
+} // namespace tools
+
+#endif // TOOLS_ANDROID_COMMON_NET_H_
+
diff --git a/tools/android/file_poller/file_poller.cc b/tools/android/file_poller/file_poller.cc
new file mode 100644
index 0000000..c73db8b
--- /dev/null
+++ b/tools/android/file_poller/file_poller.cc
@@ -0,0 +1,207 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// When run with 2 or more arguments, the file_poller tool opens a port on the
+// device, prints it on its standard output and then starts collecting file
+// contents. The first argument is the polling rate in Hz, and the following
+// arguments are the files to poll.
+// When run with the port of an already running file_poller, the tool contacts
+// the first instance, retrieves the samples and prints them on its standard
+// output. This also terminates the first instance.
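+//
+// Example session (illustrative values; /proc/stat is just an example file):
+//   $ file_poller 10 /proc/stat      # prints a port, e.g. 43210, then detaches
+//   $ file_poller 43210              # dumps the collected samples and stops
+//                                    # the polling instance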
+
+#include <errno.h>
+#include <fcntl.h>
+#include <netinet/in.h>
+#include <signal.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/socket.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "base/logging.h"
+
+// Context containing the files to poll and the polling rate.
+struct Context {
+ size_t nb_files;
+ int* file_fds;
+ int poll_rate;
+};
+
+// Write from the buffer to the given file descriptor.
+void safe_write(int fd, const char* buffer, int size) {
+ const char* index = buffer;
+ size_t to_write = size;
+ while (to_write > 0) {
+ int written = write(fd, index, to_write);
+ if (written < 0)
+ PLOG(FATAL);
+ index += written;
+ to_write -= written;
+ }
+}
+
+// Transfer the content of a file descriptor to another.
+void transfer_to_fd(int fd_in, int fd_out) {
+ char buffer[1024];
+ int n;
+ while ((n = read(fd_in, buffer, sizeof(buffer))) > 0)
+ safe_write(fd_out, buffer, n);
+}
+
+// Transfer the content of a file descriptor to a buffer.
+int transfer_to_buffer(int fd_in, char* buffer, size_t size) {
+  char* index = buffer;
+ size_t to_read = size;
+  int n = 0;
+ while (to_read > 0 && ((n = read(fd_in, index, to_read)) > 0)) {
+ index += n;
+ to_read -= n;
+ }
+ if (n < 0)
+ PLOG(FATAL);
+ return size - to_read;
+}
+
+// Try to open the file at the given path for reading. Exit in case of failure.
+int checked_open(const char* path) {
+ int fd = open(path, O_RDONLY);
+ if (fd < 0)
+ PLOG(FATAL);
+ return fd;
+}
+
+void transfer_measurement(int fd_in, int fd_out, bool last) {
+ char buffer[1024];
+ if (lseek(fd_in, 0, SEEK_SET) < 0)
+ PLOG(FATAL);
+ int n = transfer_to_buffer(fd_in, buffer, sizeof(buffer));
+ safe_write(fd_out, buffer, n - 1);
+ safe_write(fd_out, last ? "\n" : " ", 1);
+}
+
+// Acquire a sample and save it to the given file descriptor.
+void acquire_sample(int fd, const Context& context) {
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+ char buffer[1024];
+  int n = snprintf(buffer, sizeof(buffer), "%ld.%06ld ",
+                   static_cast<long>(tv.tv_sec), static_cast<long>(tv.tv_usec));
+ safe_write(fd, buffer, n);
+
+  for (size_t i = 0; i < context.nb_files; ++i)
+ transfer_measurement(context.file_fds[i], fd, i == (context.nb_files - 1));
+}
+
+void poll_content(const Context& context) {
+ // Create and bind the socket so that the port can be written to stdout.
+ int sockfd = socket(AF_INET, SOCK_STREAM, 0);
+ struct sockaddr_in socket_info;
+ socket_info.sin_family = AF_INET;
+ socket_info.sin_addr.s_addr = htonl(INADDR_ANY);
+ socket_info.sin_port = htons(0);
+ if (bind(sockfd, (struct sockaddr*)&socket_info, sizeof(socket_info)) < 0)
+ PLOG(FATAL);
+ socklen_t size = sizeof(socket_info);
+ getsockname(sockfd, (struct sockaddr*)&socket_info, &size);
+ printf("%d\n", ntohs(socket_info.sin_port));
+  // Use a pipe to ensure the child has detached from the terminal before the
+  // parent quits.
+ int pipes[2];
+ pipe(pipes);
+ pid_t pid = fork();
+ if (pid < 0)
+ PLOG(FATAL);
+ if (pid != 0) {
+ close(pipes[1]);
+    // Not expecting any data to be received; this blocks until the child
+    // closes its end of the pipe.
+    char unused;
+    read(pipes[0], &unused, 1);
+ signal(SIGCHLD, SIG_IGN);
+ return;
+ }
+
+ // Detach from terminal.
+ setsid();
+ close(STDIN_FILENO);
+ close(STDOUT_FILENO);
+ close(STDERR_FILENO);
+ close(pipes[0]);
+
+ // Start listening for incoming connection.
+ if (listen(sockfd, 1) < 0)
+ PLOG(FATAL);
+
+ // Signal the parent that it can now safely exit.
+ close(pipes[1]);
+
+ // Prepare file to store the samples.
+ int fd;
+ char filename[] = "/data/local/tmp/fileXXXXXX";
+ fd = mkstemp(filename);
+ unlink(filename);
+
+  // Collect samples until a client connects to the socket.
+ fd_set rfds;
+ struct timeval timeout;
+ do {
+ acquire_sample(fd, context);
+ timeout.tv_sec = 0;
+ timeout.tv_usec = 1000000 / context.poll_rate;
+ FD_ZERO(&rfds);
+ FD_SET(sockfd, &rfds);
+ } while (select(sockfd + 1, &rfds, NULL, NULL, &timeout) == 0);
+
+ // Collect a final sample.
+ acquire_sample(fd, context);
+
+ // Send the result back.
+ struct sockaddr_in remote_socket_info;
+ int rfd = accept(sockfd, (struct sockaddr*)&remote_socket_info, &size);
+ if (rfd < 0)
+ PLOG(FATAL);
+ if (lseek(fd, 0, SEEK_SET) < 0)
+ PLOG(FATAL);
+ transfer_to_fd(fd, rfd);
+}
+
+void content_collection(int port) {
+ int sockfd = socket(AF_INET, SOCK_STREAM, 0);
+ // Connect to localhost.
+ struct sockaddr_in socket_info;
+ socket_info.sin_family = AF_INET;
+ socket_info.sin_addr.s_addr = htonl(0x7f000001);
+ socket_info.sin_port = htons(port);
+ if (connect(sockfd, (struct sockaddr*)&socket_info, sizeof(socket_info)) <
+ 0) {
+ PLOG(FATAL);
+ }
+ transfer_to_fd(sockfd, STDOUT_FILENO);
+}
+
+int main(int argc, char** argv) {
+ if (argc == 1) {
+ fprintf(stderr,
+ "Usage: \n"
+ " %s port\n"
+ " %s rate FILE...\n",
+ argv[0],
+ argv[0]);
+ exit(EXIT_FAILURE);
+ }
+ if (argc == 2) {
+ // Argument is the port to connect to.
+ content_collection(atoi(argv[1]));
+ } else {
+    // The first argument is the poll frequency in Hz; the following arguments
+    // are the files to poll.
+ Context context;
+ context.poll_rate = atoi(argv[1]);
+ context.nb_files = argc - 2;
+ context.file_fds = new int[context.nb_files];
+ for (int i = 2; i < argc; ++i)
+ context.file_fds[i - 2] = checked_open(argv[i]);
+ poll_content(context);
+ }
+ return EXIT_SUCCESS;
+}
diff --git a/tools/android/file_poller/file_poller.gyp b/tools/android/file_poller/file_poller.gyp
new file mode 100644
index 0000000..097344d
--- /dev/null
+++ b/tools/android/file_poller/file_poller.gyp
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'file_poller',
+ 'type': 'executable',
+ 'dependencies': [
+ '../../../base/base.gyp:base',
+ ],
+ 'sources': [
+ 'file_poller.cc',
+ ],
+ },
+ ],
+}
diff --git a/tools/android/find_unused_resources.py b/tools/android/find_unused_resources.py
new file mode 100755
index 0000000..1e8fa48
--- /dev/null
+++ b/tools/android/find_unused_resources.py
@@ -0,0 +1,145 @@
+#!/usr/bin/python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Lists unused Java strings and other resources."""
+
+import optparse
+import re
+import subprocess
+import sys
+
+
+def GetLibraryResources(r_txt_paths):
+ """Returns the resources packaged in a list of libraries.
+
+ Args:
+ r_txt_paths: paths to each library's generated R.txt file which lists the
+ resources it contains.
+
+ Returns:
+ The resources in the libraries as a list of tuples (type, name). Example:
+ [('drawable', 'arrow'), ('layout', 'month_picker'), ...]
+ """
+ resources = []
+ for r_txt_path in r_txt_paths:
+ with open(r_txt_path, 'r') as f:
+ for line in f:
+ line = line.strip()
+ if not line:
+ continue
+ data_type, res_type, name, _ = line.split(None, 3)
+ assert data_type in ('int', 'int[]')
+ # Hide attrs, which are redundant with styleables and always appear
+ # unused, and hide ids, which are innocuous even if unused.
+ if res_type in ('attr', 'id'):
+ continue
+ resources.append((res_type, name))
+ return resources
+
+
+def GetUsedResources(source_paths, resource_types):
+ """Returns the types and names of resources used in Java or resource files.
+
+ Args:
+ source_paths: a list of files or folders collectively containing all the
+ Java files, resource files, and the AndroidManifest.xml.
+ resource_types: a list of resource types to look for. Example:
+ ['string', 'drawable']
+
+ Returns:
+ The resources referenced by the Java and resource files as a list of tuples
+ (type, name). Example:
+ [('drawable', 'app_icon'), ('layout', 'month_picker'), ...]
+ """
+ type_regex = '|'.join(map(re.escape, resource_types))
+ patterns = [r'@(())(%s)/(\w+)' % type_regex,
+ r'\b((\w+\.)*)R\.(%s)\.(\w+)' % type_regex]
+ resources = []
+ for pattern in patterns:
+ p = subprocess.Popen(
+ ['grep', '-REIhoe', pattern] + source_paths,
+        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ grep_out, grep_err = p.communicate()
+ # Check stderr instead of return code, since return code is 1 when no
+ # matches are found.
+ assert not grep_err, 'grep failed'
+ matches = re.finditer(pattern, grep_out)
+ for match in matches:
+ package = match.group(1)
+ if package == 'android.':
+ continue
+ type_ = match.group(3)
+ name = match.group(4)
+ resources.append((type_, name))
+ return resources
+
+
+def FormatResources(resources):
+ """Formats a list of resources for printing.
+
+ Args:
+ resources: a list of resources, given as (type, name) tuples.
+ """
+ return '\n'.join(['%-12s %s' % (t, n) for t, n in sorted(resources)])
+
+
+def ParseArgs(args):
+ parser = optparse.OptionParser()
+ parser.add_option('-v', help='Show verbose output', action='store_true')
+ parser.add_option('-s', '--source-path', help='Specify a source folder path '
+ '(e.g. ui/android/java)', action='append', default=[])
+ parser.add_option('-r', '--r-txt-path', help='Specify a "first-party" R.txt '
+ 'file (e.g. out/Debug/content_shell_apk/R.txt)',
+ action='append', default=[])
+ parser.add_option('-t', '--third-party-r-txt-path', help='Specify an R.txt '
+ 'file for a third party library', action='append',
+ default=[])
+ options, args = parser.parse_args(args=args)
+ if args:
+ parser.error('positional arguments not allowed')
+ if not options.source_path:
+ parser.error('at least one source folder path must be specified with -s')
+ if not options.r_txt_path:
+ parser.error('at least one R.txt path must be specified with -r')
+ return (options.v, options.source_path, options.r_txt_path,
+ options.third_party_r_txt_path)
+
+
+def main(args=None):
+ verbose, source_paths, r_txt_paths, third_party_r_txt_paths = ParseArgs(args)
+ defined_resources = (set(GetLibraryResources(r_txt_paths)) -
+ set(GetLibraryResources(third_party_r_txt_paths)))
+ resource_types = list(set([r[0] for r in defined_resources]))
+ used_resources = set(GetUsedResources(source_paths, resource_types))
+ unused_resources = defined_resources - used_resources
+ undefined_resources = used_resources - defined_resources
+
+ # aapt dump fails silently. Notify the user if things look wrong.
+ if not defined_resources:
+ print >> sys.stderr, (
+ 'Warning: No resources found. Did you provide the correct R.txt paths?')
+ if not used_resources:
+ print >> sys.stderr, (
+ 'Warning: No resources referenced from Java or resource files. Did you '
+ 'provide the correct source paths?')
+ if undefined_resources:
+ print >> sys.stderr, (
+ 'Warning: found %d "undefined" resources that are referenced by Java '
+ 'files or by other resources, but are not defined anywhere. Run with '
+ '-v to see them.' % len(undefined_resources))
+
+ if verbose:
+ print '%d undefined resources:' % len(undefined_resources)
+ print FormatResources(undefined_resources), '\n'
+ print '%d resources defined:' % len(defined_resources)
+ print FormatResources(defined_resources), '\n'
+ print '%d used resources:' % len(used_resources)
+ print FormatResources(used_resources), '\n'
+ print '%d unused resources:' % len(unused_resources)
+ print FormatResources(unused_resources)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/android/findbugs_plugin/README b/tools/android/findbugs_plugin/README
new file mode 100644
index 0000000..3ba3f53
--- /dev/null
+++ b/tools/android/findbugs_plugin/README
@@ -0,0 +1,15 @@
+This is the FindBugs plugin for Chrome on Android.
+
+Currently it detects:
+- synchronized method
+- synchronized 'this'
+
+Neither synchronized methods nor synchronized 'this' should be used;
+the exception is synchronized methods defined in the Android API.
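+
+For example (illustrative snippet), both of the following would be
+flagged:
+
+  synchronized void addTask(Runnable task) {     // synchronized method
+      ...
+  }
+
+  void addTask(Runnable task) {
+      synchronized (this) {                      // synchronized 'this'
+          ...
+      }
+  }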
+
+The plugin jar file is prebuilt and checked in. To rebuild the plugin
+you need ant; run the command below and the new jar file will be
+placed in the lib directory.
+
+ant install
diff --git a/tools/android/findbugs_plugin/build.xml b/tools/android/findbugs_plugin/build.xml
new file mode 100644
index 0000000..09ee13c
--- /dev/null
+++ b/tools/android/findbugs_plugin/build.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (c) 2012 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+
+<project name="findbugs_plugin" basedir=".">
+
+ <description>
+ Build findbugs_plugin for Chromium Android
+ </description>
+ <property name="src.dir" location="src" />
+ <property name="lib.dir" location="../../../third_party/findbugs/lib" />
+ <property name="bin.dir" location="lib" />
+ <property name="intermediate.dir" location="intermediate" />
+ <property name="jar.name" value="chromiumPlugin.jar" />
+
+ <path id="classpath.id">
+ <fileset dir="${lib.dir}">
+ <include name="**/*.jar" />
+ </fileset>
+ </path>
+
+ <target name="makedir">
+ <mkdir dir="${intermediate.dir}" />
+ <mkdir dir="${bin.dir}" />
+ </target>
+
+ <target name="findbugs_plugin_classes" depends="makedir">
+ <javac srcdir="${src.dir}" destdir="${intermediate.dir}"
+ classpathref="classpath.id" includeantruntime="false" />
+ </target>
+
+ <target name="copy_xml_files" depends="makedir">
+ <copy file="messages.xml" todir="${intermediate.dir}" />
+ <copy file="findbugs.xml" todir="${intermediate.dir}" />
+ </target>
+
+ <target name="findbugs_plugin_jar" depends="findbugs_plugin_classes, copy_xml_files">
+ <jar destfile="${bin.dir}/${jar.name}" basedir="${intermediate.dir}">
+ </jar>
+ </target>
+
+ <target name="install" depends="findbugs_plugin_jar">
+ <delete dir="${intermediate.dir}" />
+ </target>
+</project>
diff --git a/tools/android/findbugs_plugin/findbugs.xml b/tools/android/findbugs_plugin/findbugs.xml
new file mode 100644
index 0000000..43b1f34
--- /dev/null
+++ b/tools/android/findbugs_plugin/findbugs.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (c) 2012 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+
+<FindbugsPlugin xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:noNamespaceSchemaLocation="findbugsplugin.xsd"
+ pluginid="SynchronizedThisDetector"
+ provider="chromium"
+ website="http://code.google.com/p/chromium/wiki/UseFindBugsForAndroid">
+ <Detector class="org.chromium.tools.findbugs.plugin.SynchronizedThisDetector" reports="CHROMIUM_SYNCHRONIZED_THIS" />
+ <BugPattern type="CHROMIUM_SYNCHRONIZED_THIS" abbrev="CST" category="CORRECTNESS"/>
+
+ <Detector class="org.chromium.tools.findbugs.plugin.SynchronizedMethodDetector" reports="CHROMIUM_SYNCHRONIZED_METHOD" />
+ <BugPattern type="CHROMIUM_SYNCHRONIZED_METHOD" abbrev="CSM" category="CORRECTNESS"/>
+</FindbugsPlugin>
diff --git a/tools/android/findbugs_plugin/findbugs_plugin.gyp b/tools/android/findbugs_plugin/findbugs_plugin.gyp
new file mode 100644
index 0000000..16d06e6
--- /dev/null
+++ b/tools/android/findbugs_plugin/findbugs_plugin.gyp
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'findbugs_plugin_test',
+ 'type': 'none',
+ 'variables': {
+ 'java_in_dir': 'test/java/',
+ },
+ 'includes': [ '../../../build/java.gypi' ],
+ }
+ ]
+}
diff --git a/tools/android/findbugs_plugin/lib/chromiumPlugin.jar b/tools/android/findbugs_plugin/lib/chromiumPlugin.jar
new file mode 100644
index 0000000..6ccf61b
--- /dev/null
+++ b/tools/android/findbugs_plugin/lib/chromiumPlugin.jar
Binary files differ
diff --git a/tools/android/findbugs_plugin/messages.xml b/tools/android/findbugs_plugin/messages.xml
new file mode 100644
index 0000000..aea983b
--- /dev/null
+++ b/tools/android/findbugs_plugin/messages.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+ Copyright (c) 2012 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+
+<MessageCollection xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:noNamespaceSchemaLocation="messagecollection.xsd">
+
+ <Plugin>
+    <ShortDescription>Chromium FindBugs Plugin</ShortDescription>
+ <Details>Adds style checks enforced in the chromium project.</Details>
+ </Plugin>
+
+ <Detector class="org.chromium.tools.findbugs.plugin.SynchronizedThisDetector">
+ <Details>
+ <![CDATA[
+ Shouldn't use synchronized(this).
+ ]]>
+ </Details>
+
+ </Detector>
+
+ <BugPattern type="CHROMIUM_SYNCHRONIZED_THIS">
+ <ShortDescription>Shouldn't use synchronized(this)</ShortDescription>
+ <LongDescription>Shouldn't use synchronized(this), please narrow down the synchronization scope.</LongDescription>
+ <Details>
+<![CDATA[
+<p>Shouldn't use synchronized(this), please narrow down the synchronization scope.</p>
+]]>
+ </Details>
+ </BugPattern>
+
+ <Detector class="org.chromium.tools.findbugs.plugin.SynchronizedMethodDetector">
+ <Details>
+ <![CDATA[
+ Shouldn't use synchronized method.
+ ]]>
+ </Details>
+
+ </Detector>
+
+ <BugPattern type="CHROMIUM_SYNCHRONIZED_METHOD">
+ <ShortDescription>Shouldn't use synchronized method</ShortDescription>
+ <LongDescription>Shouldn't use synchronized method, please narrow down the synchronization scope.</LongDescription>
+ <Details>
+<![CDATA[
+<p>Shouldn't use synchronized method, please narrow down the synchronization scope.</p>
+]]>
+ </Details>
+ </BugPattern>
+
+ <BugCode abbrev="CHROMIUM">CHROMIUM</BugCode>
+</MessageCollection>
diff --git a/tools/android/findbugs_plugin/src/org/chromium/tools/findbugs/plugin/SynchronizedMethodDetector.java b/tools/android/findbugs_plugin/src/org/chromium/tools/findbugs/plugin/SynchronizedMethodDetector.java
new file mode 100644
index 0000000..d1d7614
--- /dev/null
+++ b/tools/android/findbugs_plugin/src/org/chromium/tools/findbugs/plugin/SynchronizedMethodDetector.java
@@ -0,0 +1,37 @@
+// Copyright 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.tools.findbugs.plugin;
+
+import org.apache.bcel.classfile.Code;
+
+import edu.umd.cs.findbugs.BugInstance;
+import edu.umd.cs.findbugs.BugReporter;
+import edu.umd.cs.findbugs.bcel.OpcodeStackDetector;
+
+/**
+ * This class detects synchronized methods.
+ */
+public class SynchronizedMethodDetector extends OpcodeStackDetector {
+ private BugReporter mBugReporter;
+
+ public SynchronizedMethodDetector(BugReporter bugReporter) {
+ this.mBugReporter = bugReporter;
+ }
+
+ @Override
+ public void visit(Code code) {
+ if (getMethod().isSynchronized()) {
+ mBugReporter.reportBug(new BugInstance(this, "CHROMIUM_SYNCHRONIZED_METHOD",
+ NORMAL_PRIORITY)
+ .addClassAndMethod(this)
+ .addSourceLine(this));
+ }
+ super.visit(code);
+ }
+
+ @Override
+ public void sawOpcode(int arg0) {
+ }
+}
diff --git a/tools/android/findbugs_plugin/src/org/chromium/tools/findbugs/plugin/SynchronizedThisDetector.java b/tools/android/findbugs_plugin/src/org/chromium/tools/findbugs/plugin/SynchronizedThisDetector.java
new file mode 100644
index 0000000..9a4e5e1
--- /dev/null
+++ b/tools/android/findbugs_plugin/src/org/chromium/tools/findbugs/plugin/SynchronizedThisDetector.java
@@ -0,0 +1,73 @@
+// Copyright 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.tools.findbugs.plugin;
+
+import org.apache.bcel.classfile.Code;
+
+import edu.umd.cs.findbugs.BugInstance;
+import edu.umd.cs.findbugs.BugReporter;
+import edu.umd.cs.findbugs.bcel.OpcodeStackDetector;
+
+/**
+ * This class detects synchronized(this) blocks.
+ *
+ * The bytecode pattern of synchronized(this) is:
+ *   aload_0      # Load the 'this' pointer on top of the stack
+ *   dup          # Duplicate the 'this' pointer
+ *   astore_x     # Store 'this' for later use; it might be a plain astore.
+ * monitorenter
+ */
+public class SynchronizedThisDetector extends OpcodeStackDetector {
+ private static final int PATTERN[] = {ALOAD_0, DUP, 0xff, 0xff, MONITORENTER};
+
+ private int mStep = 0;
+ private BugReporter mBugReporter;
+
+ public SynchronizedThisDetector(BugReporter bugReporter) {
+ mBugReporter = bugReporter;
+ }
+
+ @Override
+ public void visit(Code code) {
+ mStep = 0;
+ super.visit(code);
+ }
+
+ @Override
+ public void sawOpcode(int seen) {
+ if (PATTERN[mStep] == seen) {
+ mStep++;
+ if (mStep == PATTERN.length) {
+ mBugReporter.reportBug(new BugInstance(this, "CHROMIUM_SYNCHRONIZED_THIS",
+ NORMAL_PRIORITY)
+ .addClassAndMethod(this)
+ .addSourceLine(this));
+ mStep = 0;
+ return;
+ }
+ } else if (mStep == 2) {
+ // This could be astore_x
+ switch (seen) {
+ case ASTORE_0:
+ case ASTORE_1:
+ case ASTORE_2:
+ case ASTORE_3:
+ mStep += 2;
+ break;
+ case ASTORE:
+ mStep++;
+ break;
+ default:
+ mStep = 0;
+ break;
+ }
+ } else if (mStep == 3) {
+ // Could be any byte following the ASTORE.
+ mStep++;
+ } else {
+ mStep = 0;
+ }
+ }
+}
diff --git a/tools/android/findbugs_plugin/test/expected_result.txt b/tools/android/findbugs_plugin/test/expected_result.txt
new file mode 100644
index 0000000..076b007
--- /dev/null
+++ b/tools/android/findbugs_plugin/test/expected_result.txt
@@ -0,0 +1,3 @@
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope. At SimpleSynchronizedMethod.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope. At SimpleSynchronizedStaticMethod.java
+M C CST: Shouldn't use synchronized(this), please narrow down the synchronization scope. At SimpleSynchronizedThis.java
diff --git a/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedMethod.java b/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedMethod.java
new file mode 100644
index 0000000..ded7848
--- /dev/null
+++ b/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedMethod.java
@@ -0,0 +1,17 @@
+// Copyright 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.tools.findbugs.plugin;
+
+/**
+ * This class has a synchronized method and is used to test
+ * SynchronizedMethodDetector.
+ */
+class SimpleSynchronizedMethod {
+ private int mCounter = 0;
+
+ synchronized void synchronizedMethod() {
+ mCounter++;
+ }
+}
diff --git a/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedStaticMethod.java b/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedStaticMethod.java
new file mode 100644
index 0000000..d652dbe
--- /dev/null
+++ b/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedStaticMethod.java
@@ -0,0 +1,16 @@
+// Copyright 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.tools.findbugs.plugin;
+
+/**
+ * This class is used to test SynchronizedMethodDetector
+ */
+class SimpleSynchronizedStaticMethod {
+ private static int sCounter = 0;
+
+ static synchronized void synchronizedStaticMethod() {
+ sCounter++;
+ }
+}
diff --git a/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedThis.java b/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedThis.java
new file mode 100644
index 0000000..9125155
--- /dev/null
+++ b/tools/android/findbugs_plugin/test/java/src/org/chromium/tools/findbugs/plugin/SimpleSynchronizedThis.java
@@ -0,0 +1,19 @@
+// Copyright 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.tools.findbugs.plugin;
+
+/**
+ * This class has a synchronized(this) statement and is used to test
+ * SynchronizedThisDetector.
+ */
+class SimpleSynchronizedThis {
+ private int mCounter = 0;
+
+ void synchronizedThis() {
+ synchronized (this) {
+ mCounter++;
+ }
+ }
+}
diff --git a/tools/android/findbugs_plugin/test/run_findbugs_plugin_tests.py b/tools/android/findbugs_plugin/test/run_findbugs_plugin_tests.py
new file mode 100755
index 0000000..c2e1531
--- /dev/null
+++ b/tools/android/findbugs_plugin/test/run_findbugs_plugin_tests.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is used to test the findbugs plugin. It calls
+# build/android/pylib/utils/findbugs.py to analyze the classes in the
+# org.chromium.tools.findbugs.plugin package, and expects the reported issues
+# to match those in expected_result.txt.
+#
+# Useful command line:
+#   --rebaseline to regenerate expected_result.txt; please make sure not to
+#     remove the expected results of existing tests.
+
+
+import optparse
+import os
+import sys
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ '..', '..', '..', '..',
+ 'build', 'android')))
+
+from pylib import constants
+from pylib.utils import findbugs
+
+
+def main(argv):
+ parser = findbugs.GetCommonParser()
+
+ options, _ = parser.parse_args()
+
+ if not options.known_bugs:
+ options.known_bugs = os.path.join(constants.DIR_SOURCE_ROOT, 'tools',
+ 'android', 'findbugs_plugin', 'test',
+ 'expected_result.txt')
+ if not options.only_analyze:
+ options.only_analyze = 'org.chromium.tools.findbugs.plugin.*'
+
+ return findbugs.Run(options)
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/tools/android/forwarder/forwarder.cc b/tools/android/forwarder/forwarder.cc
new file mode 100644
index 0000000..fe49903
--- /dev/null
+++ b/tools/android/forwarder/forwarder.cc
@@ -0,0 +1,426 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <errno.h>
+#include <fcntl.h>
+#include <netinet/in.h>
+#include <netinet/tcp.h>
+#include <pthread.h>
+#include <signal.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/select.h>
+#include <sys/socket.h>
+#include <sys/wait.h>
+#include <unistd.h>
+
+#include "base/command_line.h"
+#include "base/logging.h"
+#include "base/posix/eintr_wrapper.h"
+#include "tools/android/common/adb_connection.h"
+#include "tools/android/common/daemon.h"
+#include "tools/android/common/net.h"
+
+namespace {
+
+const pthread_t kInvalidThread = static_cast<pthread_t>(-1);
+volatile bool g_killed = false;
+
+void CloseSocket(int fd) {
+ if (fd >= 0) {
+ int old_errno = errno;
+ close(fd);
+ errno = old_errno;
+ }
+}
+
+class Buffer {
+ public:
+ Buffer()
+ : bytes_read_(0),
+ write_offset_(0) {
+ }
+
+ bool CanRead() {
+ return bytes_read_ == 0;
+ }
+
+ bool CanWrite() {
+ return write_offset_ < bytes_read_;
+ }
+
+ int Read(int fd) {
+ int ret = -1;
+ if (CanRead()) {
+ ret = HANDLE_EINTR(read(fd, buffer_, kBufferSize));
+ if (ret > 0)
+ bytes_read_ = ret;
+ }
+ return ret;
+ }
+
+ int Write(int fd) {
+ int ret = -1;
+ if (CanWrite()) {
+ ret = HANDLE_EINTR(write(fd, buffer_ + write_offset_,
+ bytes_read_ - write_offset_));
+ if (ret > 0) {
+ write_offset_ += ret;
+ if (write_offset_ == bytes_read_) {
+ write_offset_ = 0;
+ bytes_read_ = 0;
+ }
+ }
+ }
+ return ret;
+ }
+
+ private:
+ // A big buffer to let our file-over-http bridge work more like a real file.
+ static const int kBufferSize = 1024 * 128;
+ int bytes_read_;
+ int write_offset_;
+ char buffer_[kBufferSize];
+
+ DISALLOW_COPY_AND_ASSIGN(Buffer);
+};
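+
+// Illustrative sketch (not part of the original change) of the intended
+// Read()/Write() cycle for a Buffer; ForwarderThread() below drives two of
+// these, one per direction, under select(). |in_fd| and |out_fd| are
+// hypothetical file descriptors.
+//
+//   Buffer buffer;
+//   if (buffer.CanRead() && buffer.Read(in_fd) <= 0)
+//     return;  // Peer closed the connection or a read error occurred.
+//   while (buffer.CanWrite()) {
+//     if (buffer.Write(out_fd) <= 0)
+//       break;  // Would block (EAGAIN) or a write error occurred.
+//   }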
+
+class Server;
+
+struct ForwarderThreadInfo {
+ ForwarderThreadInfo(Server* a_server, int a_forwarder_index)
+ : server(a_server),
+ forwarder_index(a_forwarder_index) {
+ }
+ Server* server;
+ int forwarder_index;
+};
+
+struct ForwarderInfo {
+ time_t start_time;
+ int socket1;
+ time_t socket1_last_byte_time;
+ size_t socket1_bytes;
+ int socket2;
+ time_t socket2_last_byte_time;
+ size_t socket2_bytes;
+};
+
+class Server {
+ public:
+ Server()
+ : thread_(kInvalidThread),
+ socket_(-1) {
+ memset(forward_to_, 0, sizeof(forward_to_));
+ memset(&forwarders_, 0, sizeof(forwarders_));
+ }
+
+ int GetFreeForwarderIndex() {
+ for (int i = 0; i < kMaxForwarders; i++) {
+ if (forwarders_[i].start_time == 0)
+ return i;
+ }
+ return -1;
+ }
+
+ void DisposeForwarderInfo(int index) {
+ forwarders_[index].start_time = 0;
+ }
+
+ ForwarderInfo* GetForwarderInfo(int index) {
+ return &forwarders_[index];
+ }
+
+ void DumpInformation() {
+ LOG(INFO) << "Server information: " << forward_to_;
+ LOG(INFO) << "No.: age up(bytes,idle) down(bytes,idle)";
+ int count = 0;
+ time_t now = time(NULL);
+ for (int i = 0; i < kMaxForwarders; i++) {
+ const ForwarderInfo& info = forwarders_[i];
+ if (info.start_time) {
+ count++;
+ LOG(INFO) << count << ": " << now - info.start_time << " up("
+ << info.socket1_bytes << ","
+ << now - info.socket1_last_byte_time << ") down("
+ << info.socket2_bytes << ","
+ << now - info.socket2_last_byte_time << ")";
+ }
+ }
+ }
+
+ void Shutdown() {
+ if (socket_ >= 0)
+ shutdown(socket_, SHUT_RDWR);
+ }
+
+ bool InitSocket(const char* arg);
+
+ void StartThread() {
+ pthread_create(&thread_, NULL, ServerThread, this);
+ }
+
+ void JoinThread() {
+ if (thread_ != kInvalidThread)
+ pthread_join(thread_, NULL);
+ }
+
+ private:
+ static void* ServerThread(void* arg);
+
+ // There are 3 kinds of threads that will access the array:
+ // 1. Server thread will get a free ForwarderInfo and initialize it;
+ // 2. Forwarder threads will dispose of their ForwarderInfo when they finish;
+ // 3. Main thread will iterate and print the forwarders.
+ // Using an array is not optimal, but can avoid locks or other complex
+ // inter-thread communication.
+ static const int kMaxForwarders = 512;
+ ForwarderInfo forwarders_[kMaxForwarders];
+
+ pthread_t thread_;
+ int socket_;
+ char forward_to_[40];
+
+ DISALLOW_COPY_AND_ASSIGN(Server);
+};
+
+// Forwards all outputs from one socket to another socket.
+void* ForwarderThread(void* arg) {
+ ForwarderThreadInfo* thread_info =
+ reinterpret_cast<ForwarderThreadInfo*>(arg);
+ Server* server = thread_info->server;
+ int index = thread_info->forwarder_index;
+ delete thread_info;
+ ForwarderInfo* info = server->GetForwarderInfo(index);
+ int socket1 = info->socket1;
+ int socket2 = info->socket2;
+ int nfds = socket1 > socket2 ? socket1 + 1 : socket2 + 1;
+ fd_set read_fds;
+ fd_set write_fds;
+ Buffer buffer1;
+ Buffer buffer2;
+
+ while (!g_killed) {
+ FD_ZERO(&read_fds);
+ if (buffer1.CanRead())
+ FD_SET(socket1, &read_fds);
+ if (buffer2.CanRead())
+ FD_SET(socket2, &read_fds);
+
+ FD_ZERO(&write_fds);
+ if (buffer1.CanWrite())
+ FD_SET(socket2, &write_fds);
+ if (buffer2.CanWrite())
+ FD_SET(socket1, &write_fds);
+
+ if (HANDLE_EINTR(select(nfds, &read_fds, &write_fds, NULL, NULL)) <= 0) {
+ LOG(ERROR) << "Select error: " << strerror(errno);
+ break;
+ }
+
+ int now = time(NULL);
+ if (FD_ISSET(socket1, &read_fds)) {
+ info->socket1_last_byte_time = now;
+ int bytes = buffer1.Read(socket1);
+ if (bytes <= 0)
+ break;
+ info->socket1_bytes += bytes;
+ }
+ if (FD_ISSET(socket2, &read_fds)) {
+ info->socket2_last_byte_time = now;
+ int bytes = buffer2.Read(socket2);
+ if (bytes <= 0)
+ break;
+ info->socket2_bytes += bytes;
+ }
+ if (FD_ISSET(socket1, &write_fds)) {
+ if (buffer2.Write(socket1) <= 0)
+ break;
+ }
+ if (FD_ISSET(socket2, &write_fds)) {
+ if (buffer1.Write(socket2) <= 0)
+ break;
+ }
+ }
+
+ CloseSocket(socket1);
+ CloseSocket(socket2);
+ server->DisposeForwarderInfo(index);
+ return NULL;
+}
+
+// Listens to a server socket. On incoming request, forwards it to the host.
+// static
+void* Server::ServerThread(void* arg) {
+ Server* server = reinterpret_cast<Server*>(arg);
+ while (!g_killed) {
+ int forwarder_index = server->GetFreeForwarderIndex();
+ if (forwarder_index < 0) {
+ LOG(ERROR) << "Too many forwarders";
+ continue;
+ }
+
+ struct sockaddr_in addr;
+ socklen_t addr_len = sizeof(addr);
+ int socket = HANDLE_EINTR(accept(server->socket_,
+ reinterpret_cast<sockaddr*>(&addr),
+ &addr_len));
+ if (socket < 0) {
+ LOG(ERROR) << "Failed to accept: " << strerror(errno);
+ break;
+ }
+ tools::DisableNagle(socket);
+
+ int host_socket = tools::ConnectAdbHostSocket(server->forward_to_);
+ if (host_socket >= 0) {
+ // Set NONBLOCK flag because we use select().
+ fcntl(socket, F_SETFL, fcntl(socket, F_GETFL) | O_NONBLOCK);
+ fcntl(host_socket, F_SETFL, fcntl(host_socket, F_GETFL) | O_NONBLOCK);
+
+ ForwarderInfo* forwarder_info = server->GetForwarderInfo(forwarder_index);
+ time_t now = time(NULL);
+ forwarder_info->start_time = now;
+ forwarder_info->socket1 = socket;
+ forwarder_info->socket1_last_byte_time = now;
+ forwarder_info->socket1_bytes = 0;
+ forwarder_info->socket2 = host_socket;
+ forwarder_info->socket2_last_byte_time = now;
+ forwarder_info->socket2_bytes = 0;
+
+ pthread_t thread;
+ pthread_create(&thread, NULL, ForwarderThread,
+ new ForwarderThreadInfo(server, forwarder_index));
+ } else {
+ // Close the unused client socket that failed to connect to the host.
+ CloseSocket(socket);
+ }
+ }
+
+ CloseSocket(server->socket_);
+ server->socket_ = -1;
+ return NULL;
+}
+
+// Format of arg: <Device port>[:<Forward to port>:<Forward to address>]
+bool Server::InitSocket(const char* arg) {
+ char* endptr;
+ int local_port = static_cast<int>(strtol(arg, &endptr, 10));
+ if (local_port < 0)
+ return false;
+
+ if (*endptr != ':') {
+ snprintf(forward_to_, sizeof(forward_to_), "%d:127.0.0.1", local_port);
+ } else {
+ strncpy(forward_to_, endptr + 1, sizeof(forward_to_) - 1);
+ }
+
+ socket_ = socket(AF_INET, SOCK_STREAM, 0);
+ if (socket_ < 0) {
+ perror("server socket");
+ return false;
+ }
+ tools::DisableNagle(socket_);
+
+ sockaddr_in addr;
+ memset(&addr, 0, sizeof(addr));
+ addr.sin_family = AF_INET;
+ addr.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
+ addr.sin_port = htons(local_port);
+ int reuse_addr = 1;
+ setsockopt(socket_, SOL_SOCKET, SO_REUSEADDR,
+ &reuse_addr, sizeof(reuse_addr));
+ tools::DeferAccept(socket_);
+ if (HANDLE_EINTR(bind(socket_, reinterpret_cast<sockaddr*>(&addr),
+ sizeof(addr))) < 0 ||
+ HANDLE_EINTR(listen(socket_, 5)) < 0) {
+ perror("server bind");
+ CloseSocket(socket_);
+ socket_ = -1;
+ return false;
+ }
+
+ if (local_port == 0) {
+ socklen_t addrlen = sizeof(addr);
+ if (getsockname(socket_, reinterpret_cast<sockaddr*>(&addr), &addrlen)
+ != 0) {
+ perror("get listen address");
+ CloseSocket(socket_);
+ socket_ = -1;
+ return false;
+ }
+ local_port = ntohs(addr.sin_port);
+ }
+
+ printf("Forwarding device port %d to host %s\n", local_port, forward_to_);
+ return true;
+}
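+
+// Illustrative examples (not part of the original change) of how InitSocket()
+// above maps a port spec to |forward_to_|; the host 10.0.0.5 is made up:
+//
+//   "8080"               -> listen on device port 8080,
+//                           forward_to_ = "8080:127.0.0.1"
+//   "8080:3000:10.0.0.5" -> listen on device port 8080,
+//                           forward_to_ = "3000:10.0.0.5"
+//   "0:3000:10.0.0.5"    -> listen on a dynamically allocated device port,
+//                           forward_to_ = "3000:10.0.0.5"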
+
+int g_server_count = 0;
+Server* g_servers = NULL;
+
+void KillHandler(int unused) {
+ g_killed = true;
+ for (int i = 0; i < g_server_count; i++)
+ g_servers[i].Shutdown();
+}
+
+void DumpInformation(int unused) {
+ for (int i = 0; i < g_server_count; i++)
+ g_servers[i].DumpInformation();
+}
+
+} // namespace
+
+int main(int argc, char** argv) {
+ printf("Android device to host TCP forwarder\n");
+ printf("Like 'adb forward' but in the reverse direction\n");
+
+ CommandLine command_line(argc, argv);
+ CommandLine::StringVector server_args = command_line.GetArgs();
+ if (tools::HasHelpSwitch(command_line) || server_args.empty()) {
+ tools::ShowHelp(
+ argv[0],
+ "<Device port>[:<Forward to port>:<Forward to address>] ...",
+ " <Forward to port> default is <Device port>\n"
+ " <Forward to address> default is 127.0.0.1\n"
+ "If <Device port> is 0, a port will by dynamically allocated.\n");
+ return 0;
+ }
+
+ g_servers = new Server[server_args.size()];
+ g_server_count = 0;
+ int failed_count = 0;
+ for (size_t i = 0; i < server_args.size(); i++) {
+ if (!g_servers[g_server_count].InitSocket(server_args[i].c_str())) {
+ printf("Couldn't start forwarder server for port spec: %s\n",
+ server_args[i].c_str());
+ ++failed_count;
+ } else {
+ ++g_server_count;
+ }
+ }
+
+ if (g_server_count == 0) {
+ printf("No forwarder servers could be started. Exiting.\n");
+ delete [] g_servers;
+ return failed_count;
+ }
+
+ if (!tools::HasNoSpawnDaemonSwitch(command_line))
+ tools::SpawnDaemon(failed_count);
+
+ signal(SIGTERM, KillHandler);
+ signal(SIGUSR2, DumpInformation);
+
+ for (int i = 0; i < g_server_count; i++)
+ g_servers[i].StartThread();
+ for (int i = 0; i < g_server_count; i++)
+ g_servers[i].JoinThread();
+ g_server_count = 0;
+ delete [] g_servers;
+
+ return 0;
+}
+
diff --git a/tools/android/forwarder/forwarder.gyp b/tools/android/forwarder/forwarder.gyp
new file mode 100644
index 0000000..1df518b
--- /dev/null
+++ b/tools/android/forwarder/forwarder.gyp
@@ -0,0 +1,43 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'forwarder',
+ 'type': 'none',
+ 'dependencies': [
+ 'forwarder_symbols',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'strip_forwarder',
+ 'inputs': ['<(PRODUCT_DIR)/forwarder_symbols'],
+ 'outputs': ['<(PRODUCT_DIR)/forwarder'],
+ 'action': [
+ '<(android_strip)',
+ '--strip-unneeded',
+ '<@(_inputs)',
+ '-o',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ }, {
+ 'target_name': 'forwarder_symbols',
+ 'type': 'executable',
+ 'dependencies': [
+ '../../../base/base.gyp:base',
+ '../common/common.gyp:android_tools_common',
+ ],
+ 'include_dirs': [
+ '../../..',
+ ],
+ 'sources': [
+ 'forwarder.cc',
+ ],
+ },
+ ],
+}
+
diff --git a/tools/android/forwarder2/command.cc b/tools/android/forwarder2/command.cc
new file mode 100644
index 0000000..9b0aa24
--- /dev/null
+++ b/tools/android/forwarder2/command.cc
@@ -0,0 +1,96 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/command.h"
+
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "base/logging.h"
+#include "base/safe_strerror_posix.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_piece.h"
+#include "tools/android/forwarder2/socket.h"
+
+using base::StringPiece;
+
+namespace {
+
+
+// Command format:
+// <port>:<type>
+//
+// Where:
+// <port> is a 5-char zero-padded ASCII decimal integer
+// matching the target port for the command (e.g.
+// '08080' for port 8080)
+// <type> is a 2-char zero-padded ASCII decimal integer
+// matching a command::Type value (e.g. 02 for
+// ACK).
+// The colon (:) is used as a separator for easier reading.
+// (An encoding example follows this anonymous namespace.)
+const int kPortStringSize = 5;
+const int kCommandTypeStringSize = 2;
+// Command string size also includes the ':' separator char.
+const int kCommandStringSize = kPortStringSize + kCommandTypeStringSize + 1;
+
+} // namespace
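+
+// Illustrative encoding sketch (not part of the original change): under the
+// format described above, sending command::ACK (value 2 in command.h) for
+// port 8080 boils down to what SendCommand() below does:
+//
+//   char buffer[kCommandStringSize + 1];
+//   snprintf(buffer, sizeof(buffer), "%05d:%02d", 8080, 2);
+//   // buffer now holds "08080:02"; the trailing '\0' is not sent.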
+
+namespace forwarder2 {
+
+bool ReadCommand(Socket* socket,
+ int* port_out,
+ command::Type* command_type_out) {
+ char command_buffer[kCommandStringSize + 1];
+ // To make logging easier.
+ command_buffer[kCommandStringSize] = '\0';
+
+ int bytes_read = socket->ReadNumBytes(command_buffer, kCommandStringSize);
+ if (bytes_read != kCommandStringSize) {
+ if (bytes_read < 0)
+ LOG(ERROR) << "Read() error: " << safe_strerror(errno);
+ else if (!bytes_read)
+ LOG(ERROR) << "Read() error, endpoint was unexpectedly closed.";
+ else
+ LOG(ERROR) << "Read() error, not enough data received from the socket.";
+ return false;
+ }
+
+ StringPiece port_str(command_buffer, kPortStringSize);
+ if (!StringToInt(port_str, port_out)) {
+ LOG(ERROR) << "Could not parse the command port string: "
+ << port_str;
+ return false;
+ }
+
+ StringPiece command_type_str(
+ &command_buffer[kPortStringSize + 1], kCommandTypeStringSize);
+ int command_type;
+ if (!StringToInt(command_type_str, &command_type)) {
+ LOG(ERROR) << "Could not parse the command type string: "
+ << command_type_str;
+ return false;
+ }
+ *command_type_out = static_cast<command::Type>(command_type);
+ return true;
+}
+
+bool SendCommand(command::Type command, int port, Socket* socket) {
+ char buffer[kCommandStringSize + 1];
+ int len = snprintf(buffer, sizeof(buffer), "%05d:%02d", port, command);
+ CHECK_EQ(len, kCommandStringSize);
+ // Write the full command minus the trailing \0 char.
+ return socket->WriteNumBytes(buffer, len) == len;
+}
+
+bool ReceivedCommand(command::Type command, Socket* socket) {
+ int port;
+ command::Type received_command;
+ if (!ReadCommand(socket, &port, &received_command))
+ return false;
+ return received_command == command;
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/command.h b/tools/android/forwarder2/command.h
new file mode 100644
index 0000000..8e222ef
--- /dev/null
+++ b/tools/android/forwarder2/command.h
@@ -0,0 +1,48 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_COMMAND_H_
+#define TOOLS_ANDROID_FORWARDER2_COMMAND_H_
+
+#include "base/basictypes.h"
+
+namespace forwarder2 {
+
+class Socket;
+
+namespace command {
+
+enum Type {
+ ACCEPT_ERROR = 0,
+ ACCEPT_SUCCESS,
+ ACK,
+ ADB_DATA_SOCKET_ERROR,
+ ADB_DATA_SOCKET_SUCCESS,
+ BIND_ERROR,
+ BIND_SUCCESS,
+ DATA_CONNECTION,
+ HOST_SERVER_ERROR,
+ HOST_SERVER_SUCCESS,
+ KILL_ALL_LISTENERS,
+ LISTEN,
+ UNLISTEN,
+ UNLISTEN_ERROR,
+ UNLISTEN_SUCCESS,
+};
+
+} // namespace command
+
+bool ReadCommand(Socket* socket,
+ int* port_out,
+ command::Type* command_type_out);
+
+// Helper function that reads a command from |socket| and returns true if it
+// matches the given |command|.
+bool ReceivedCommand(command::Type command, Socket* socket);
+
+bool SendCommand(command::Type command, int port, Socket* socket);
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_COMMAND_H_
diff --git a/tools/android/forwarder2/common.cc b/tools/android/forwarder2/common.cc
new file mode 100644
index 0000000..3b7387d
--- /dev/null
+++ b/tools/android/forwarder2/common.cc
@@ -0,0 +1,28 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/common.h"
+
+#include <errno.h>
+#include <unistd.h>
+
+#include "base/logging.h"
+#include "base/posix/eintr_wrapper.h"
+#include "base/safe_strerror_posix.h"
+
+namespace forwarder2 {
+
+void PError(const char* msg) {
+ LOG(ERROR) << msg << ": " << safe_strerror(errno);
+}
+
+void CloseFD(int fd) {
+ const int errno_copy = errno;
+ if (IGNORE_EINTR(close(fd)) < 0) {
+ PError("close");
+ errno = errno_copy;
+ }
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/common.h b/tools/android/forwarder2/common.h
new file mode 100644
index 0000000..43de57b
--- /dev/null
+++ b/tools/android/forwarder2/common.h
@@ -0,0 +1,89 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Common helper functions/classes used both in the host and device forwarder.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_COMMON_H_
+#define TOOLS_ANDROID_FORWARDER2_COMMON_H_
+
+#include <stdarg.h>
+#include <stdio.h>
+#include <errno.h>
+
+#include "base/basictypes.h"
+#include "base/compiler_specific.h"
+#include "base/logging.h"
+#include "base/posix/eintr_wrapper.h"
+
+// Preserving errno for Close() is important because the function is very often
+// used in cleanup code, after an error occurred, and it is very easy to pass an
+// invalid file descriptor to close() in this context, or more rarely, a
+// spurious signal might make close() return -1 + setting errno to EINTR,
+// masking the real reason for the original error. This leads to very unpleasant
+// debugging sessions.
+#define PRESERVE_ERRNO_HANDLE_EINTR(Func) \
+ do { \
+ int local_errno = errno; \
+ (void) HANDLE_EINTR(Func); \
+ errno = local_errno; \
+ } while (false);
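+
+// Hypothetical usage sketch (not part of the original change): preserving the
+// errno of a failed syscall across the close() done in its cleanup path;
+// PError() is declared below in this header.
+//
+//   if (connect(fd, addr, addr_len) < 0) {
+//     PRESERVE_ERRNO_HANDLE_EINTR(close(fd));
+//     PError("connect");  // Still reports connect()'s errno, not close()'s.
+//   }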
+
+// Wrapper around RAW_LOG() which is signal-safe. The only purpose of this macro
+// is to avoid documenting uses of RawLog().
+#define SIGNAL_SAFE_LOG(Level, Msg) \
+ RAW_LOG(Level, Msg);
+
+namespace forwarder2 {
+
+// Note that the two following functions are not signal-safe.
+
+// Chromium logging-aware implementation of libc's perror().
+void PError(const char* msg);
+
+// Closes the provided file descriptor and logs an error if it failed.
+void CloseFD(int fd);
+
+// Helps build a formatted C-string allocated in a fixed-size array. This is
+// useful in signal handlers where base::StringPrintf() can't be used safely
+// (due to its use of LOG()).
+template <int BufferSize>
+class FixedSizeStringBuilder {
+ public:
+ FixedSizeStringBuilder() {
+ Reset();
+ }
+
+ const char* buffer() const { return buffer_; }
+
+ void Reset() {
+ buffer_[0] = 0;
+ write_ptr_ = buffer_;
+ }
+
+ // Returns the number of bytes appended to the underlying buffer or -1 if it
+ // failed.
+ int Append(const char* format, ...) PRINTF_FORMAT(/* + 1 for 'this' */ 2, 3) {
+ if (write_ptr_ >= buffer_ + BufferSize)
+ return -1;
+ va_list ap;
+ va_start(ap, format);
+ const int bytes_written = vsnprintf(
+ write_ptr_, BufferSize - (write_ptr_ - buffer_), format, ap);
+ va_end(ap);
+ if (bytes_written > 0)
+ write_ptr_ += bytes_written;
+ return bytes_written;
+ }
+
+ private:
+ char* write_ptr_;
+ char buffer_[BufferSize];
+
+ COMPILE_ASSERT(BufferSize >= 1, Size_of_buffer_must_be_at_least_one);
+ DISALLOW_COPY_AND_ASSIGN(FixedSizeStringBuilder);
+};
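+
+// Hypothetical usage sketch (not part of the original change), mirroring how
+// the daemon's SIGCHLD handler in daemon.cc builds a message without LOG();
+// |pid| and |status| are assumed ints from waitpid().
+//
+//   FixedSizeStringBuilder<64> builder;
+//   builder.Append("Child %d exited with status %d.", pid, status);
+//   SIGNAL_SAFE_LOG(ERROR, builder.buffer());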
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_COMMON_H_
diff --git a/tools/android/forwarder2/daemon.cc b/tools/android/forwarder2/daemon.cc
new file mode 100644
index 0000000..19a1054
--- /dev/null
+++ b/tools/android/forwarder2/daemon.cc
@@ -0,0 +1,290 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/daemon.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <signal.h>
+#include <sys/file.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unistd.h>
+
+#include <cstdlib>
+#include <cstring>
+#include <string>
+
+#include "base/basictypes.h"
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/posix/eintr_wrapper.h"
+#include "base/safe_strerror_posix.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/stringprintf.h"
+#include "tools/android/forwarder2/common.h"
+#include "tools/android/forwarder2/socket.h"
+
+namespace forwarder2 {
+namespace {
+
+const int kBufferSize = 256;
+
+// Timeout constants used for polling when connecting to the daemon's Unix
+// Domain Socket and when waiting for it to exit after being killed.
+const int kNumTries = 100;
+const int kIdleTimeMSec = 20;
+
+void InitLoggingForDaemon(const std::string& log_file) {
+ logging::LoggingSettings settings;
+ settings.logging_dest =
+ log_file.empty() ?
+ logging::LOG_TO_SYSTEM_DEBUG_LOG : logging::LOG_TO_FILE;
+ settings.log_file = log_file.c_str();
+ settings.lock_log = logging::DONT_LOCK_LOG_FILE;
+ CHECK(logging::InitLogging(settings));
+}
+
+bool RunServerAcceptLoop(const std::string& welcome_message,
+ Socket* server_socket,
+ Daemon::ServerDelegate* server_delegate) {
+ bool failed = false;
+ for (;;) {
+ scoped_ptr<Socket> client_socket(new Socket());
+ if (!server_socket->Accept(client_socket.get())) {
+ if (server_socket->DidReceiveEvent())
+ break;
+ PError("Accept()");
+ failed = true;
+ break;
+ }
+ if (!client_socket->Write(welcome_message.c_str(),
+ welcome_message.length() + 1)) {
+ PError("Write()");
+ failed = true;
+ continue;
+ }
+ server_delegate->OnClientConnected(client_socket.Pass());
+ }
+ return !failed;
+}
+
+void SigChildHandler(int signal_number) {
+ DCHECK_EQ(signal_number, SIGCHLD);
+ int status;
+ pid_t child_pid = waitpid(-1 /* any child */, &status, WNOHANG);
+ if (child_pid < 0) {
+ PError("waitpid");
+ return;
+ }
+ if (child_pid == 0)
+ return;
+ if (WIFEXITED(status) && WEXITSTATUS(status) == 0)
+ return;
+ // Avoid using StringAppendF() since it's unsafe in a signal handler due to
+ // its use of LOG().
+ FixedSizeStringBuilder<256> string_builder;
+ string_builder.Append("Daemon (pid=%d) died unexpectedly with ", child_pid);
+ if (WIFEXITED(status))
+ string_builder.Append("status %d.", WEXITSTATUS(status));
+ else if (WIFSIGNALED(status))
+ string_builder.Append("signal %d.", WTERMSIG(status));
+ else
+ string_builder.Append("unknown reason.");
+ SIGNAL_SAFE_LOG(ERROR, string_builder.buffer());
+}
+
+// Note that 0 is written to |lock_owner_pid| in case the file is not locked.
+bool GetFileLockOwnerPid(int fd, pid_t* lock_owner_pid) {
+ struct flock lock_info = {};
+ lock_info.l_type = F_WRLCK;
+ lock_info.l_whence = SEEK_CUR;
+ const int ret = HANDLE_EINTR(fcntl(fd, F_GETLK, &lock_info));
+ if (ret < 0) {
+ if (errno == EBADF) {
+ // Assume that the provided file descriptor corresponding to the PID file
+ // was valid until the daemon removed this file.
+ *lock_owner_pid = 0;
+ return true;
+ }
+ PError("fcntl");
+ return false;
+ }
+ if (lock_info.l_type == F_UNLCK) {
+ *lock_owner_pid = 0;
+ return true;
+ }
+ CHECK_EQ(F_WRLCK /* exclusive lock */, lock_info.l_type);
+ *lock_owner_pid = lock_info.l_pid;
+ return true;
+}
+
+scoped_ptr<Socket> ConnectToUnixDomainSocket(
+ const std::string& socket_name,
+ int tries_count,
+ int idle_time_msec,
+ const std::string& expected_welcome_message) {
+ for (int i = 0; i < tries_count; ++i) {
+ scoped_ptr<Socket> socket(new Socket());
+ if (!socket->ConnectUnix(socket_name)) {
+ if (idle_time_msec)
+ usleep(idle_time_msec * 1000);
+ continue;
+ }
+ char buf[kBufferSize];
+ DCHECK(expected_welcome_message.length() + 1 <= sizeof(buf));
+ memset(buf, 0, sizeof(buf));
+ if (socket->Read(buf, expected_welcome_message.length() + 1) < 0) {
+ perror("read");
+ continue;
+ }
+ if (expected_welcome_message != buf) {
+ LOG(ERROR) << "Unexpected message read from daemon: " << buf;
+ break;
+ }
+ return socket.Pass();
+ }
+ return scoped_ptr<Socket>();
+}
+
+} // namespace
+
+Daemon::Daemon(const std::string& log_file_path,
+ const std::string& identifier,
+ ClientDelegate* client_delegate,
+ ServerDelegate* server_delegate,
+ GetExitNotifierFDCallback get_exit_fd_callback)
+ : log_file_path_(log_file_path),
+ identifier_(identifier),
+ client_delegate_(client_delegate),
+ server_delegate_(server_delegate),
+ get_exit_fd_callback_(get_exit_fd_callback) {
+ DCHECK(client_delegate_);
+ DCHECK(server_delegate_);
+ DCHECK(get_exit_fd_callback_);
+}
+
+Daemon::~Daemon() {}
+
+bool Daemon::SpawnIfNeeded() {
+ const int kSingleTry = 1;
+ const int kNoIdleTime = 0;
+ scoped_ptr<Socket> client_socket = ConnectToUnixDomainSocket(
+ identifier_, kSingleTry, kNoIdleTime, identifier_);
+ if (!client_socket) {
+ switch (fork()) {
+ case -1:
+ PError("fork()");
+ return false;
+ // Child.
+ case 0: {
+ if (setsid() < 0) { // Detach the child process from its parent.
+ PError("setsid()");
+ exit(1);
+ }
+ InitLoggingForDaemon(log_file_path_);
+ CloseFD(STDIN_FILENO);
+ CloseFD(STDOUT_FILENO);
+ CloseFD(STDERR_FILENO);
+ const int null_fd = open("/dev/null", O_RDWR);
+ CHECK_EQ(null_fd, STDIN_FILENO);
+ CHECK_EQ(dup(null_fd), STDOUT_FILENO);
+ CHECK_EQ(dup(null_fd), STDERR_FILENO);
+ Socket command_socket;
+ if (!command_socket.BindUnix(identifier_)) {
+ scoped_ptr<Socket> client_socket = ConnectToUnixDomainSocket(
+ identifier_, kSingleTry, kNoIdleTime, identifier_);
+ if (client_socket.get()) {
+ // The daemon was spawned by a concurrent process.
+ exit(0);
+ }
+ PError("bind()");
+ exit(1);
+ }
+ server_delegate_->Init();
+ command_socket.AddEventFd(get_exit_fd_callback_());
+ return RunServerAcceptLoop(
+ identifier_, &command_socket, server_delegate_);
+ }
+ default:
+ break;
+ }
+ }
+ // Parent.
+ // Install the custom SIGCHLD handler.
+ sigset_t blocked_signals_set;
+ if (sigprocmask(0 /* first arg ignored */, NULL, &blocked_signals_set) < 0) {
+ PError("sigprocmask()");
+ return false;
+ }
+ struct sigaction old_action;
+ struct sigaction new_action;
+ memset(&new_action, 0, sizeof(new_action));
+ new_action.sa_handler = SigChildHandler;
+ new_action.sa_flags = SA_NOCLDSTOP;
+ sigemptyset(&new_action.sa_mask);
+ if (sigaction(SIGCHLD, &new_action, &old_action) < 0) {
+ PError("sigaction()");
+ return false;
+ }
+ // Connect to the daemon's Unix Domain Socket.
+ bool failed = false;
+ if (!client_socket) {
+ client_socket = ConnectToUnixDomainSocket(
+ identifier_, kNumTries, kIdleTimeMSec, identifier_);
+ if (!client_socket) {
+ LOG(ERROR) << "Could not connect to daemon's Unix Daemon socket";
+ failed = true;
+ }
+ }
+ if (!failed)
+ client_delegate_->OnDaemonReady(client_socket.get());
+ // Restore the previous signal action for SIGCHLD.
+ if (sigaction(SIGCHLD, &old_action, NULL) < 0) {
+ PError("sigaction");
+ failed = true;
+ }
+ return !failed;
+}
+
+bool Daemon::Kill() {
+ pid_t daemon_pid = Socket::GetUnixDomainSocketProcessOwner(identifier_);
+ if (daemon_pid < 0) {
+ LOG(ERROR) << "No forwarder daemon seems to be running";
+ return true;
+ }
+ if (kill(daemon_pid, SIGTERM) < 0) {
+ if (errno == ESRCH /* invalid PID */) {
+ // The daemon exited for some reason (e.g. kill by a process other than
+ // us) right before the call to kill() above.
+ LOG(ERROR) << "Could not kill daemon with PID " << daemon_pid;
+ return true;
+ }
+ PError("kill");
+ return false;
+ }
+ for (int i = 0; i < kNumTries; ++i) {
+ const pid_t previous_pid = daemon_pid;
+ daemon_pid = Socket::GetUnixDomainSocketProcessOwner(identifier_);
+ if (daemon_pid < 0)
+ return true;
+ // Since we are polling we might not see the 'daemon exited' event if
+ // another daemon was spawned during our idle period.
+ if (daemon_pid != previous_pid) {
+ LOG(WARNING) << "Daemon (pid=" << previous_pid
+ << ") was successfully killed but a new daemon (pid="
+ << daemon_pid << ") seems to be running now.";
+ return true;
+ }
+ usleep(kIdleTimeMSec * 1000);
+ }
+ LOG(ERROR) << "Timed out while killing daemon. "
+ "It might still be tearing down.";
+ return false;
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/daemon.h b/tools/android/forwarder2/daemon.h
new file mode 100644
index 0000000..4b05ea4
--- /dev/null
+++ b/tools/android/forwarder2/daemon.h
@@ -0,0 +1,75 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_DAEMON_H_
+#define TOOLS_ANDROID_FORWARDER2_DAEMON_H_
+
+#include <string>
+
+#include "base/basictypes.h"
+#include "base/memory/scoped_ptr.h"
+
+namespace forwarder2 {
+
+class Socket;
+
+// Provides a way to spawn a daemon and communicate with it.
+class Daemon {
+ public:
+ // Callback used by the daemon to shut down properly. See pipe_notifier.h for
+ // more details.
+ typedef int (*GetExitNotifierFDCallback)();
+
+ class ClientDelegate {
+ public:
+ virtual ~ClientDelegate() {}
+
+ // Called after the daemon is ready to receive commands.
+ virtual void OnDaemonReady(Socket* daemon_socket) = 0;
+ };
+
+ class ServerDelegate {
+ public:
+ virtual ~ServerDelegate() {}
+
+ // Called after the daemon bound its Unix Domain Socket. This can be used to
+ // setup signal handlers or perform global initialization.
+ virtual void Init() = 0;
+
+ virtual void OnClientConnected(scoped_ptr<Socket> client_socket) = 0;
+ };
+
+ // |identifier| should be a unique string identifier. It is used to
+ // bind/connect the underlying Unix Domain Socket.
+ // Note that this class does not take ownership of |client_delegate| and
+ // |server_delegate|.
+ Daemon(const std::string& log_file_path,
+ const std::string& identifier,
+ ClientDelegate* client_delegate,
+ ServerDelegate* server_delegate,
+ GetExitNotifierFDCallback get_exit_fd_callback);
+
+ ~Daemon();
+
+ // Returns whether the daemon was successfully spawned. Note that this does
+ // not necessarily mean that the current process was forked, since the
+ // daemon may already be running.
+ bool SpawnIfNeeded();
+
+ // Kills the daemon and blocks until it has exited. Returns whether it
+ // succeeded.
+ bool Kill();
+
+ private:
+ const std::string log_file_path_;
+ const std::string identifier_;
+ ClientDelegate* const client_delegate_;
+ ServerDelegate* const server_delegate_;
+ const GetExitNotifierFDCallback get_exit_fd_callback_;
+
+ DISALLOW_COPY_AND_ASSIGN(Daemon);
+};
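+
+// Hypothetical usage sketch (not part of the original change); see
+// device_forwarder_main.cc for the real wiring. MyClientDelegate,
+// MyServerDelegate, the log path and the socket identifier are assumed
+// placeholders; GetExitNotifierFD is the callback defined by the caller.
+//
+//   MyClientDelegate client_delegate;
+//   MyServerDelegate server_delegate;
+//   Daemon daemon("/tmp/daemon.log", "my_daemon_socket", &client_delegate,
+//                 &server_delegate, &GetExitNotifierFD);
+//   if (!daemon.SpawnIfNeeded())
+//     return 1;  // Could not spawn or connect to the daemon.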
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_DAEMON_H_
diff --git a/tools/android/forwarder2/device_controller.cc b/tools/android/forwarder2/device_controller.cc
new file mode 100644
index 0000000..a4cb9c7
--- /dev/null
+++ b/tools/android/forwarder2/device_controller.cc
@@ -0,0 +1,158 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/device_controller.h"
+
+#include <utility>
+
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/logging.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop_proxy.h"
+#include "base/single_thread_task_runner.h"
+#include "tools/android/forwarder2/command.h"
+#include "tools/android/forwarder2/device_listener.h"
+#include "tools/android/forwarder2/socket.h"
+#include "tools/android/forwarder2/util.h"
+
+namespace forwarder2 {
+
+// static
+scoped_ptr<DeviceController> DeviceController::Create(
+ const std::string& adb_unix_socket,
+ int exit_notifier_fd) {
+ scoped_ptr<DeviceController> device_controller;
+ scoped_ptr<Socket> host_socket(new Socket());
+ if (!host_socket->BindUnix(adb_unix_socket)) {
+ PLOG(ERROR) << "Could not BindAndListen DeviceController socket on port "
+ << adb_unix_socket << ": ";
+ return device_controller.Pass();
+ }
+ LOG(INFO) << "Listening on Unix Domain Socket " << adb_unix_socket;
+ device_controller.reset(
+ new DeviceController(host_socket.Pass(), exit_notifier_fd));
+ return device_controller.Pass();
+}
+
+DeviceController::~DeviceController() {
+ DCHECK(construction_task_runner_->RunsTasksOnCurrentThread());
+}
+
+void DeviceController::Start() {
+ AcceptHostCommandSoon();
+}
+
+DeviceController::DeviceController(scoped_ptr<Socket> host_socket,
+ int exit_notifier_fd)
+ : host_socket_(host_socket.Pass()),
+ exit_notifier_fd_(exit_notifier_fd),
+ construction_task_runner_(base::MessageLoopProxy::current()),
+ weak_ptr_factory_(this) {
+ host_socket_->AddEventFd(exit_notifier_fd);
+}
+
+void DeviceController::AcceptHostCommandSoon() {
+ base::MessageLoopProxy::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&DeviceController::AcceptHostCommandInternal,
+ base::Unretained(this)));
+}
+
+void DeviceController::AcceptHostCommandInternal() {
+ scoped_ptr<Socket> socket(new Socket);
+ if (!host_socket_->Accept(socket.get())) {
+ if (!host_socket_->DidReceiveEvent())
+ PLOG(ERROR) << "Could not Accept DeviceController socket";
+ else
+ LOG(INFO) << "Received exit notification";
+ return;
+ }
+ base::ScopedClosureRunner accept_next_client(
+ base::Bind(&DeviceController::AcceptHostCommandSoon,
+ base::Unretained(this)));
+ // So that |socket| doesn't block on read if it has notifications.
+ socket->AddEventFd(exit_notifier_fd_);
+ int port;
+ command::Type command;
+ if (!ReadCommand(socket.get(), &port, &command)) {
+ LOG(ERROR) << "Invalid command received.";
+ return;
+ }
+ const ListenersMap::iterator listener_it = listeners_.find(port);
+ DeviceListener* const listener = listener_it == listeners_.end()
+ ? static_cast<DeviceListener*>(NULL) : listener_it->second.get();
+ switch (command) {
+ case command::LISTEN: {
+ if (listener != NULL) {
+ LOG(WARNING) << "Already forwarding port " << port
+ << ". Attempting to restart the listener.\n";
+ DeleteRefCountedValueInMapFromIterator(listener_it, &listeners_);
+ }
+ scoped_ptr<DeviceListener> new_listener(
+ DeviceListener::Create(
+ socket.Pass(), port,
+ base::Bind(&DeviceController::DeleteListenerOnError,
+ weak_ptr_factory_.GetWeakPtr())));
+ if (!new_listener)
+ return;
+ new_listener->Start();
+ // |port| can be zero, to allow a dynamically allocated port, so we
+ // call DeviceListener::listener_port() to retrieve the port currently
+ // allocated to this new listener.
+ const int listener_port = new_listener->listener_port();
+ listeners_.insert(
+ std::make_pair(listener_port,
+ linked_ptr<DeviceListener>(new_listener.release())));
+ LOG(INFO) << "Forwarding device port " << listener_port << " to host.";
+ break;
+ }
+ case command::DATA_CONNECTION:
+ if (listener == NULL) {
+ LOG(ERROR) << "Data Connection command received, but "
+ << "listener has not been set up yet for port " << port;
+ // After this point it is assumed that, once we close our Adb Data
+ // socket, the Adb forwarder command will propagate the closing of
+ // sockets all the way to the host side.
+ break;
+ }
+ listener->SetAdbDataSocket(socket.Pass());
+ break;
+ case command::UNLISTEN:
+ LOG(INFO) << "Unmapping port " << port;
+ if (!listener) {
+ LOG(ERROR) << "No listener found for port " << port;
+ SendCommand(command::UNLISTEN_ERROR, port, socket.get());
+ break;
+ }
+ DeleteRefCountedValueInMapFromIterator(listener_it, &listeners_);
+ SendCommand(command::UNLISTEN_SUCCESS, port, socket.get());
+ break;
+ default:
+ // TODO(felipeg): add a KillAllListeners command.
+ LOG(ERROR) << "Invalid command received. Port: " << port
+ << " Command: " << command;
+ }
+}
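+
+// Rough sketch of the command flow handled above, reconstructed from this
+// file and device_listener.cc (not part of the original change, simplified):
+//
+//   host -> device: LISTEN <port>            on a fresh control socket
+//   device -> host: BIND_SUCCESS <port>      sent by DeviceListener::Create()
+//   device -> host: ACCEPT_SUCCESS <port>    a local client connected
+//   host -> device: HOST_SERVER_SUCCESS      host-side connection is ready
+//   host -> device: DATA_CONNECTION <port>   on a new adb data socket
+//   device -> host: ADB_DATA_SOCKET_SUCCESS  forwarding starts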
+
+// static
+void DeviceController::DeleteListenerOnError(
+ const base::WeakPtr<DeviceController>& device_controller_ptr,
+ scoped_ptr<DeviceListener> device_listener) {
+ DeviceListener* const listener = device_listener.release();
+ DeviceController* const controller = device_controller_ptr.get();
+ if (!controller) {
+ // |listener| was already deleted by the controller that did have its
+ // ownership.
+ return;
+ }
+ DCHECK(controller->construction_task_runner_->RunsTasksOnCurrentThread());
+ bool listener_did_exist = DeleteRefCountedValueInMap(
+ listener->listener_port(), &controller->listeners_);
+ DCHECK(listener_did_exist);
+ // Note that |listener| was deleted by DeleteRefCountedValueInMap().
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/device_controller.h b/tools/android/forwarder2/device_controller.h
new file mode 100644
index 0000000..567a08d
--- /dev/null
+++ b/tools/android/forwarder2/device_controller.h
@@ -0,0 +1,71 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_DEVICE_CONTROLLER_H_
+#define TOOLS_ANDROID_FORWARDER2_DEVICE_CONTROLLER_H_
+
+#include <string>
+
+#include "base/basictypes.h"
+#include "base/containers/hash_tables.h"
+#include "base/memory/linked_ptr.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "tools/android/forwarder2/socket.h"
+
+namespace base {
+class SingleThreadTaskRunner;
+} // namespace base
+
+namespace forwarder2 {
+
+class DeviceListener;
+
+// There is a single DeviceController per device_forwarder process, and it is in
+// charge of managing all active redirections on the device side (one
+// DeviceListener each).
+class DeviceController {
+ public:
+ static scoped_ptr<DeviceController> Create(const std::string& adb_unix_socket,
+ int exit_notifier_fd);
+ ~DeviceController();
+
+ void Start();
+
+ private:
+ typedef base::hash_map<
+ int /* port */, linked_ptr<DeviceListener> > ListenersMap;
+
+ DeviceController(scoped_ptr<Socket> host_socket, int exit_notifier_fd);
+
+ void AcceptHostCommandSoon();
+ void AcceptHostCommandInternal();
+
+ // Note that this can end up being called after the DeviceController is
+ // destroyed which is why a weak pointer is used.
+ static void DeleteListenerOnError(
+ const base::WeakPtr<DeviceController>& device_controller_ptr,
+ scoped_ptr<DeviceListener> device_listener);
+
+ const scoped_ptr<Socket> host_socket_;
+ // Used to notify the controller to exit.
+ const int exit_notifier_fd_;
+ // Ensures that DeviceListener instances are deleted on the thread they
+ // were created on.
+ const scoped_refptr<base::SingleThreadTaskRunner> construction_task_runner_;
+ ListenersMap listeners_;
+
+ // WeakPtrFactory's documentation says:
+ // Member variables should appear before the WeakPtrFactory, to ensure
+ // that any WeakPtrs to Controller are invalidated before its member
+ // variables' destructors are executed, rendering them invalid.
+ base::WeakPtrFactory<DeviceController> weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(DeviceController);
+};
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_DEVICE_CONTROLLER_H_
diff --git a/tools/android/forwarder2/device_forwarder_main.cc b/tools/android/forwarder2/device_forwarder_main.cc
new file mode 100644
index 0000000..cad46f4
--- /dev/null
+++ b/tools/android/forwarder2/device_forwarder_main.cc
@@ -0,0 +1,169 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <signal.h>
+#include <stdlib.h>
+
+#include <iostream>
+#include <string>
+
+#include "base/at_exit.h"
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/compiler_specific.h"
+#include "base/logging.h"
+#include "base/strings/string_piece.h"
+#include "base/strings/stringprintf.h"
+#include "base/threading/thread.h"
+#include "tools/android/forwarder2/common.h"
+#include "tools/android/forwarder2/daemon.h"
+#include "tools/android/forwarder2/device_controller.h"
+#include "tools/android/forwarder2/pipe_notifier.h"
+
+namespace forwarder2 {
+namespace {
+
+// Leaky global instance, accessed from the signal handler.
+forwarder2::PipeNotifier* g_notifier = NULL;
+
+const int kBufSize = 256;
+
+const char kUnixDomainSocketPath[] = "chrome_device_forwarder";
+const char kDaemonIdentifier[] = "chrome_device_forwarder_daemon";
+
+void KillHandler(int /* unused */) {
+ CHECK(g_notifier);
+ if (!g_notifier->Notify())
+ exit(1);
+}
+
+// Lets the daemon fetch the exit notifier file descriptor.
+int GetExitNotifierFD() {
+ DCHECK(g_notifier);
+ return g_notifier->receiver_fd();
+}
+
+class ServerDelegate : public Daemon::ServerDelegate {
+ public:
+ ServerDelegate() : initialized_(false) {}
+
+ virtual ~ServerDelegate() {
+ if (!controller_thread_.get())
+ return;
+ // The DeviceController instance, if any, is constructed on the controller
+ // thread. Make sure that it gets deleted on that same thread. Note that
+ // DeleteSoon() is not used here since it would imply reading |controller_|
+ // from the main thread while it's set on the internal thread.
+ controller_thread_->message_loop_proxy()->PostTask(
+ FROM_HERE,
+ base::Bind(&ServerDelegate::DeleteControllerOnInternalThread,
+ base::Unretained(this)));
+ }
+
+ void DeleteControllerOnInternalThread() {
+ DCHECK(
+ controller_thread_->message_loop_proxy()->RunsTasksOnCurrentThread());
+ controller_.reset();
+ }
+
+ // Daemon::ServerDelegate:
+ virtual void Init() OVERRIDE {
+ DCHECK(!g_notifier);
+ g_notifier = new forwarder2::PipeNotifier();
+ signal(SIGTERM, KillHandler);
+ signal(SIGINT, KillHandler);
+ controller_thread_.reset(new base::Thread("controller_thread"));
+ controller_thread_->Start();
+ }
+
+ virtual void OnClientConnected(scoped_ptr<Socket> client_socket) OVERRIDE {
+ if (initialized_) {
+ client_socket->WriteString("OK");
+ return;
+ }
+ controller_thread_->message_loop()->PostTask(
+ FROM_HERE,
+ base::Bind(&ServerDelegate::StartController, base::Unretained(this),
+ GetExitNotifierFD(), base::Passed(&client_socket)));
+ initialized_ = true;
+ }
+
+ private:
+ void StartController(int exit_notifier_fd, scoped_ptr<Socket> client_socket) {
+ DCHECK(!controller_.get());
+ scoped_ptr<DeviceController> controller(
+ DeviceController::Create(kUnixDomainSocketPath, exit_notifier_fd));
+ if (!controller.get()) {
+ client_socket->WriteString(
+ base::StringPrintf("ERROR: Could not initialize device controller "
+ "with ADB socket path: %s",
+ kUnixDomainSocketPath));
+ return;
+ }
+ controller_.swap(controller);
+ controller_->Start();
+ client_socket->WriteString("OK");
+ client_socket->Close();
+ }
+
+ scoped_ptr<DeviceController> controller_;
+ scoped_ptr<base::Thread> controller_thread_;
+ bool initialized_;
+};
+
+class ClientDelegate : public Daemon::ClientDelegate {
+ public:
+ ClientDelegate() : has_failed_(false) {}
+
+ bool has_failed() const { return has_failed_; }
+
+ // Daemon::ClientDelegate:
+ virtual void OnDaemonReady(Socket* daemon_socket) OVERRIDE {
+ char buf[kBufSize];
+ const int bytes_read = daemon_socket->Read(
+ buf, sizeof(buf) - 1 /* leave space for null terminator */);
+ CHECK_GT(bytes_read, 0);
+ DCHECK(bytes_read < sizeof(buf));
+ buf[bytes_read] = 0;
+ base::StringPiece msg(buf, bytes_read);
+ if (msg.starts_with("ERROR")) {
+ LOG(ERROR) << msg;
+ has_failed_ = true;
+ return;
+ }
+ }
+
+ private:
+ bool has_failed_;
+};
+
+int RunDeviceForwarder(int argc, char** argv) {
+ CommandLine::Init(argc, argv); // Needed by logging.
+ const bool kill_server = CommandLine::ForCurrentProcess()->HasSwitch(
+ "kill-server");
+ if ((kill_server && argc != 2) || (!kill_server && argc != 1)) {
+ std::cerr << "Usage: device_forwarder [--kill-server]" << std::endl;
+ return 1;
+ }
+ base::AtExitManager at_exit_manager; // Used by base::Thread.
+ ClientDelegate client_delegate;
+ ServerDelegate daemon_delegate;
+ const char kLogFilePath[] = ""; // Log to logcat.
+ Daemon daemon(kLogFilePath, kDaemonIdentifier, &client_delegate,
+ &daemon_delegate, &GetExitNotifierFD);
+
+ if (kill_server)
+ return !daemon.Kill();
+
+ if (!daemon.SpawnIfNeeded())
+ return 1;
+ return client_delegate.has_failed();
+}
+
+} // namespace
+} // namespace forwarder2
+
+int main(int argc, char** argv) {
+ return forwarder2::RunDeviceForwarder(argc, argv);
+}
diff --git a/tools/android/forwarder2/device_listener.cc b/tools/android/forwarder2/device_listener.cc
new file mode 100644
index 0000000..b48a746
--- /dev/null
+++ b/tools/android/forwarder2/device_listener.cc
@@ -0,0 +1,130 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/device_listener.h"
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/callback.h"
+#include "base/logging.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop_proxy.h"
+#include "base/single_thread_task_runner.h"
+#include "tools/android/forwarder2/command.h"
+#include "tools/android/forwarder2/forwarder.h"
+#include "tools/android/forwarder2/socket.h"
+
+namespace forwarder2 {
+
+// static
+scoped_ptr<DeviceListener> DeviceListener::Create(
+ scoped_ptr<Socket> host_socket,
+ int listener_port,
+ const ErrorCallback& error_callback) {
+ scoped_ptr<Socket> listener_socket(new Socket());
+ scoped_ptr<DeviceListener> device_listener;
+ if (!listener_socket->BindTcp("", listener_port)) {
+ LOG(ERROR) << "Device could not bind and listen to local port "
+ << listener_port;
+ SendCommand(command::BIND_ERROR, listener_port, host_socket.get());
+ return device_listener.Pass();
+ }
+ // In case |listener_port| was zero, GetPort() will return the
+ // currently (non-zero) allocated port for this socket.
+ listener_port = listener_socket->GetPort();
+ SendCommand(command::BIND_SUCCESS, listener_port, host_socket.get());
+ device_listener.reset(
+ new DeviceListener(listener_socket.Pass(), host_socket.Pass(),
+ listener_port, error_callback));
+ return device_listener.Pass();
+}
+
+DeviceListener::~DeviceListener() {
+ DCHECK(deletion_task_runner_->RunsTasksOnCurrentThread());
+ deletion_notifier_.Notify();
+}
+
+void DeviceListener::Start() {
+ thread_.Start();
+ AcceptNextClientSoon();
+}
+
+void DeviceListener::SetAdbDataSocket(scoped_ptr<Socket> adb_data_socket) {
+ thread_.message_loop_proxy()->PostTask(
+ FROM_HERE,
+ base::Bind(&DeviceListener::OnAdbDataSocketReceivedOnInternalThread,
+ base::Unretained(this), base::Passed(&adb_data_socket)));
+}
+
+DeviceListener::DeviceListener(scoped_ptr<Socket> listener_socket,
+ scoped_ptr<Socket> host_socket,
+ int port,
+ const ErrorCallback& error_callback)
+ : self_deleter_helper_(this, error_callback),
+ listener_socket_(listener_socket.Pass()),
+ host_socket_(host_socket.Pass()),
+ listener_port_(port),
+ deletion_task_runner_(base::MessageLoopProxy::current()),
+ thread_("DeviceListener") {
+ CHECK(host_socket_.get());
+ DCHECK(deletion_task_runner_.get());
+ host_socket_->AddEventFd(deletion_notifier_.receiver_fd());
+ listener_socket_->AddEventFd(deletion_notifier_.receiver_fd());
+}
+
+void DeviceListener::AcceptNextClientSoon() {
+ thread_.message_loop_proxy()->PostTask(
+ FROM_HERE,
+ base::Bind(&DeviceListener::AcceptClientOnInternalThread,
+ base::Unretained(this)));
+}
+
+void DeviceListener::AcceptClientOnInternalThread() {
+ device_data_socket_.reset(new Socket());
+ if (!listener_socket_->Accept(device_data_socket_.get())) {
+ if (listener_socket_->DidReceiveEvent()) {
+ LOG(INFO) << "Received exit notification, stopped accepting clients.";
+ OnInternalThreadError();
+ return;
+ }
+ LOG(WARNING) << "Could not Accept in ListenerSocket.";
+ SendCommand(command::ACCEPT_ERROR, listener_port_, host_socket_.get());
+ OnInternalThreadError();
+ return;
+ }
+ SendCommand(command::ACCEPT_SUCCESS, listener_port_, host_socket_.get());
+ if (!ReceivedCommand(command::HOST_SERVER_SUCCESS,
+ host_socket_.get())) {
+ SendCommand(command::ACK, listener_port_, host_socket_.get());
+ LOG(ERROR) << "Host could not connect to server.";
+ device_data_socket_->Close();
+ if (host_socket_->has_error()) {
+ LOG(ERROR) << "Adb Control connection lost. "
+ << "Listener port: " << listener_port_;
+ OnInternalThreadError();
+ return;
+ }
+ // It can continue if the host forwarder could not connect to the host
+ // server but the control connection is still alive (no errors). The device
+ // acknowledged that (above), and it can re-try later.
+ AcceptNextClientSoon();
+ return;
+ }
+}
+
+void DeviceListener::OnAdbDataSocketReceivedOnInternalThread(
+ scoped_ptr<Socket> adb_data_socket) {
+ DCHECK(adb_data_socket);
+ SendCommand(command::ADB_DATA_SOCKET_SUCCESS, listener_port_,
+ host_socket_.get());
+ forwarders_manager_.CreateAndStartNewForwarder(
+ device_data_socket_.Pass(), adb_data_socket.Pass());
+ AcceptNextClientSoon();
+}
+
+void DeviceListener::OnInternalThreadError() {
+ self_deleter_helper_.MaybeSelfDeleteSoon();
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/device_listener.h b/tools/android/forwarder2/device_listener.h
new file mode 100644
index 0000000..c7724f4
--- /dev/null
+++ b/tools/android/forwarder2/device_listener.h
@@ -0,0 +1,106 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_DEVICE_LISTENER_H_
+#define TOOLS_ANDROID_FORWARDER2_DEVICE_LISTENER_H_
+
+#include "base/basictypes.h"
+#include "base/callback.h"
+#include "base/compiler_specific.h"
+#include "base/logging.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/threading/thread.h"
+#include "tools/android/forwarder2/forwarders_manager.h"
+#include "tools/android/forwarder2/pipe_notifier.h"
+#include "tools/android/forwarder2/self_deleter_helper.h"
+#include "tools/android/forwarder2/socket.h"
+
+namespace base {
+class SingleThreadTaskRunner;
+} // namespace base
+
+namespace forwarder2 {
+
+class Forwarder;
+
+// A DeviceListener instance is used in the device_forwarder program to bind to
+// a specific device-side |port| and wait for client connections. When a
+// connection happens, it informs the corresponding HostController instance
+// running on the host, through |host_socket|. Then the class expects a call to
+// its SetAdbDataSocket() method (performed by the device controller) once the
+// host opened a new connection to the device. When this happens, a new internal
+// Forwarder instance is started.
+// Note that instances of this class are owned by the device controller which
+// creates and destroys them on the same thread. In case an internal error
+// happens on the DeviceListener's internal thread, the DeviceListener
+// can also self-delete by executing the user-provided callback on the thread
+// the DeviceListener was created on.
+// Note that the DeviceListener's destructor joins its internal thread (i.e.
+// waits for its completion) which means that the internal thread is guaranteed
+// not to be running anymore once the object is deleted.
+class DeviceListener {
+ public:
+ // Callback that is used for self-deletion on error to let the device
+ // controller perform some additional cleanup work (e.g. removing the device
+ // listener instance from its internal map before deleting it).
+ typedef base::Callback<void (scoped_ptr<DeviceListener>)> ErrorCallback;
+
+ static scoped_ptr<DeviceListener> Create(scoped_ptr<Socket> host_socket,
+ int port,
+ const ErrorCallback& error_callback);
+
+ ~DeviceListener();
+
+ void Start();
+
+ void SetAdbDataSocket(scoped_ptr<Socket> adb_data_socket);
+
+ int listener_port() const { return listener_port_; }
+
+ private:
+ DeviceListener(scoped_ptr<Socket> listener_socket,
+ scoped_ptr<Socket> host_socket,
+ int port,
+ const ErrorCallback& error_callback);
+
+ // Pushes an AcceptClientOnInternalThread() task to the internal thread's
+ // message queue in order to wait for a new client soon.
+ void AcceptNextClientSoon();
+
+ void AcceptClientOnInternalThread();
+
+ void OnAdbDataSocketReceivedOnInternalThread(
+ scoped_ptr<Socket> adb_data_socket);
+
+ void OnInternalThreadError();
+
+ SelfDeleterHelper<DeviceListener> self_deleter_helper_;
+ // Used for the listener thread to be notified on destruction. We have one
+ // notifier per Listener thread since each Listener thread may be requested to
+ // exit for different reasons independently from each other and independent
+ // from the main program, ex. when the host requests to forward/listen the
+ // same port again. Both the |host_socket_| and |listener_socket_| must share
+ // the same receiver file descriptor from |deletion_notifier_| and it is set
+ // in the constructor.
+ PipeNotifier deletion_notifier_;
+ // The local device listener socket for accepting connections from the local
+ // port (listener_port_).
+ const scoped_ptr<Socket> listener_socket_;
+ // The listener socket for sending control commands.
+ const scoped_ptr<Socket> host_socket_;
+ scoped_ptr<Socket> device_data_socket_;
+ const int listener_port_;
+ // Task runner used for deletion set at construction time (i.e. the object is
+ // deleted on the same thread it is created on).
+ scoped_refptr<base::SingleThreadTaskRunner> deletion_task_runner_;
+ base::Thread thread_;
+ ForwardersManager forwarders_manager_;
+
+ DISALLOW_COPY_AND_ASSIGN(DeviceListener);
+};
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_DEVICE_LISTENER_H_
diff --git a/tools/android/forwarder2/forwarder.cc b/tools/android/forwarder2/forwarder.cc
new file mode 100644
index 0000000..1e0bcd0
--- /dev/null
+++ b/tools/android/forwarder2/forwarder.cc
@@ -0,0 +1,255 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/forwarder.h"
+
+#include "base/basictypes.h"
+#include "base/logging.h"
+#include "base/posix/eintr_wrapper.h"
+#include "tools/android/forwarder2/socket.h"
+
+namespace forwarder2 {
+namespace {
+
+const int kBufferSize = 32 * 1024;
+
+} // namespace
+
+
+// Helper class to buffer reads and writes from one socket to another.
+// Each instance implements a small buffer connected to one input socket and
+// one output socket.
+//
+// socket_from_ ---> [BufferedCopier] ---> socket_to_
+//
+// These objects are used in a pair to handle duplex traffic, as in:
+//
+// ------> [BufferedCopier_1] --->
+// / \
+// socket_1 * * socket_2
+// \ /
+// <------ [BufferedCopier_2] <----
+//
+// When a BufferedCopier is in the READING state (see below), it only listens
+// to events on its input socket, and won't detect when its output socket
+// disconnects. To work around this, its peer will call its Close() method
+// when that happens.
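+//
+// Pairing sketch (illustrative only; this mirrors what Forwarder's
+// constructor does further below):
+//
+//   BufferedCopier copier1(socket1, socket2);  // socket1 -> socket2
+//   BufferedCopier copier2(socket2, socket1);  // socket2 -> socket1
+//   copier1.SetPeer(&copier2);
+//   copier2.SetPeer(&copier1);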
+
+class Forwarder::BufferedCopier {
+ public:
+ // Possible states:
+  //    READING - Buffer is empty; waiting for input.
+  //    WRITING - Buffer has data; waiting for output.
+  //    CLOSING - Like WRITING, but do not read again once the buffer drains.
+ // CLOSED - Completely closed.
+ //
+ // State transitions are:
+ //
+ // T01: READING ---[receive data]---> WRITING
+ // T02: READING ---[error on input socket]---> CLOSED
+ // T03: READING ---[Close() call]---> CLOSED
+ //
+ // T04: WRITING ---[write partial data]---> WRITING
+ // T05: WRITING ---[write all data]----> READING
+ // T06: WRITING ---[error on output socket]----> CLOSED
+ // T07: WRITING ---[Close() call]---> CLOSING
+ //
+ // T08: CLOSING ---[write partial data]---> CLOSING
+ // T09: CLOSING ---[write all data]----> CLOSED
+ // T10: CLOSING ---[Close() call]---> CLOSING
+ // T11: CLOSING ---[error on output socket] ---> CLOSED
+ //
+ enum State {
+ STATE_READING = 0,
+ STATE_WRITING = 1,
+ STATE_CLOSING = 2,
+ STATE_CLOSED = 3,
+ };
+
+ // Does NOT own the pointers.
+ BufferedCopier(Socket* socket_from, Socket* socket_to)
+ : socket_from_(socket_from),
+ socket_to_(socket_to),
+ bytes_read_(0),
+ write_offset_(0),
+ peer_(NULL),
+ state_(STATE_READING) {}
+
+ // Sets the 'peer_' field pointing to the other BufferedCopier in a pair.
+ void SetPeer(BufferedCopier* peer) {
+ DCHECK(!peer_);
+ peer_ = peer;
+ }
+
+ bool is_closed() const { return state_ == STATE_CLOSED; }
+
+ // Gently asks to close a buffer. Called either by the peer or the forwarder.
+ void Close() {
+ switch (state_) {
+ case STATE_READING:
+ state_ = STATE_CLOSED; // T03
+ break;
+ case STATE_WRITING:
+ state_ = STATE_CLOSING; // T07
+ break;
+ case STATE_CLOSING:
+ break; // T10
+ case STATE_CLOSED:
+ ;
+ }
+ }
+
+ // Call this before select(). This updates |read_fds|,
+ // |write_fds| and |max_fd| appropriately *if* the buffer isn't closed.
+ void PrepareSelect(fd_set* read_fds, fd_set* write_fds, int* max_fd) {
+ int fd;
+ switch (state_) {
+ case STATE_READING:
+ DCHECK(bytes_read_ == 0);
+ DCHECK(write_offset_ == 0);
+ fd = socket_from_->fd();
+ if (fd < 0) {
+ ForceClose(); // T02
+ return;
+ }
+ FD_SET(fd, read_fds);
+ break;
+
+ case STATE_WRITING:
+ case STATE_CLOSING:
+ DCHECK(bytes_read_ > 0);
+ DCHECK(write_offset_ < bytes_read_);
+ fd = socket_to_->fd();
+ if (fd < 0) {
+ ForceClose(); // T06
+ return;
+ }
+ FD_SET(fd, write_fds);
+ break;
+
+ case STATE_CLOSED:
+ return;
+ }
+ *max_fd = std::max(*max_fd, fd);
+ }
+
+ // Call this after a select() call to operate over the buffer.
+ void ProcessSelect(const fd_set& read_fds, const fd_set& write_fds) {
+ int fd, ret;
+ switch (state_) {
+ case STATE_READING:
+ fd = socket_from_->fd();
+ if (fd < 0) {
+ state_ = STATE_CLOSED; // T02
+ return;
+ }
+ if (!FD_ISSET(fd, &read_fds))
+ return;
+
+ ret = socket_from_->NonBlockingRead(buffer_, kBufferSize);
+ if (ret <= 0) {
+ ForceClose(); // T02
+ return;
+ }
+ bytes_read_ = ret;
+ write_offset_ = 0;
+ state_ = STATE_WRITING; // T01
+ break;
+
+ case STATE_WRITING:
+ case STATE_CLOSING:
+ fd = socket_to_->fd();
+ if (fd < 0) {
+ ForceClose(); // T06 + T11
+ return;
+ }
+ if (!FD_ISSET(fd, &write_fds))
+ return;
+
+ ret = socket_to_->NonBlockingWrite(buffer_ + write_offset_,
+ bytes_read_ - write_offset_);
+ if (ret <= 0) {
+ ForceClose(); // T06 + T11
+ return;
+ }
+
+ write_offset_ += ret;
+ if (write_offset_ < bytes_read_)
+ return; // T08 + T04
+
+ write_offset_ = 0;
+ bytes_read_ = 0;
+ if (state_ == STATE_CLOSING) {
+ ForceClose(); // T09
+ return;
+ }
+ state_ = STATE_READING; // T05
+ break;
+
+ case STATE_CLOSED:
+ ;
+ }
+ }
+
+ private:
+ // Internal method used to close the buffer and notify the peer, if any.
+ void ForceClose() {
+ if (peer_) {
+ peer_->Close();
+ peer_ = NULL;
+ }
+ state_ = STATE_CLOSED;
+ }
+
+ // Not owned.
+ Socket* socket_from_;
+ Socket* socket_to_;
+
+ int bytes_read_;
+ int write_offset_;
+ BufferedCopier* peer_;
+ State state_;
+ char buffer_[kBufferSize];
+
+ DISALLOW_COPY_AND_ASSIGN(BufferedCopier);
+};
+
+Forwarder::Forwarder(scoped_ptr<Socket> socket1,
+ scoped_ptr<Socket> socket2)
+ : socket1_(socket1.Pass()),
+ socket2_(socket2.Pass()),
+ buffer1_(new BufferedCopier(socket1_.get(), socket2_.get())),
+ buffer2_(new BufferedCopier(socket2_.get(), socket1_.get())) {
+ buffer1_->SetPeer(buffer2_.get());
+ buffer2_->SetPeer(buffer1_.get());
+}
+
+Forwarder::~Forwarder() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+}
+
+void Forwarder::RegisterFDs(fd_set* read_fds, fd_set* write_fds, int* max_fd) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ buffer1_->PrepareSelect(read_fds, write_fds, max_fd);
+ buffer2_->PrepareSelect(read_fds, write_fds, max_fd);
+}
+
+void Forwarder::ProcessEvents(const fd_set& read_fds, const fd_set& write_fds) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ buffer1_->ProcessSelect(read_fds, write_fds);
+ buffer2_->ProcessSelect(read_fds, write_fds);
+}
+
+bool Forwarder::IsClosed() const {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ return buffer1_->is_closed() && buffer2_->is_closed();
+}
+
+void Forwarder::Shutdown() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ buffer1_->Close();
+ buffer2_->Close();
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/forwarder.gyp b/tools/android/forwarder2/forwarder.gyp
new file mode 100644
index 0000000..fbf5eba
--- /dev/null
+++ b/tools/android/forwarder2/forwarder.gyp
@@ -0,0 +1,70 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'forwarder2',
+ 'type': 'none',
+ 'dependencies': [
+ 'device_forwarder',
+ 'host_forwarder#host',
+ ],
+ # For the component build, ensure dependent shared libraries are stripped
+ # and put alongside forwarder to simplify pushing to the device.
+ 'variables': {
+ 'output_dir': '<(PRODUCT_DIR)/forwarder_dist/',
+ 'native_binary': '<(PRODUCT_DIR)/device_forwarder',
+ },
+ 'includes': ['../../../build/android/native_app_dependencies.gypi'],
+ },
+ {
+ 'target_name': 'device_forwarder',
+ 'type': 'executable',
+ 'toolsets': ['target'],
+ 'dependencies': [
+ '../../../base/base.gyp:base',
+ '../common/common.gyp:android_tools_common',
+ ],
+ 'include_dirs': [
+ '../../..',
+ ],
+ 'sources': [
+ 'command.cc',
+ 'common.cc',
+ 'daemon.cc',
+ 'device_controller.cc',
+ 'device_forwarder_main.cc',
+ 'device_listener.cc',
+ 'forwarder.cc',
+ 'forwarders_manager.cc',
+ 'pipe_notifier.cc',
+ 'socket.cc',
+ ],
+ },
+ {
+ 'target_name': 'host_forwarder',
+ 'type': 'executable',
+ 'toolsets': ['host'],
+ 'dependencies': [
+ '../../../base/base.gyp:base',
+ '../common/common.gyp:android_tools_common',
+ ],
+ 'include_dirs': [
+ '../../..',
+ ],
+ 'sources': [
+ 'command.cc',
+ 'common.cc',
+ 'daemon.cc',
+ 'forwarder.cc',
+ 'forwarders_manager.cc',
+ 'host_controller.cc',
+ 'host_forwarder_main.cc',
+ 'pipe_notifier.cc',
+ 'socket.cc',
+ ],
+ },
+ ],
+}
diff --git a/tools/android/forwarder2/forwarder.h b/tools/android/forwarder2/forwarder.h
new file mode 100644
index 0000000..0be86fc
--- /dev/null
+++ b/tools/android/forwarder2/forwarder.h
@@ -0,0 +1,47 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_FORWARDER_H_
+#define TOOLS_ANDROID_FORWARDER2_FORWARDER_H_
+
+#include <sys/select.h>
+
+#include "base/memory/scoped_ptr.h"
+#include "base/threading/thread_checker.h"
+
+namespace forwarder2 {
+
+class Socket;
+
+// Internal class that forwards traffic between |socket1| and |socket2|. Note
+// that this class is not thread-safe.
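+//
+// Sketch of the typical select() loop driving a Forwarder (illustrative only;
+// the real loop lives in ForwardersManager):
+//
+//   fd_set read_fds, write_fds;
+//   FD_ZERO(&read_fds);
+//   FD_ZERO(&write_fds);
+//   int max_fd = -1;
+//   forwarder->RegisterFDs(&read_fds, &write_fds, &max_fd);
+//   if (HANDLE_EINTR(
+//           select(max_fd + 1, &read_fds, &write_fds, NULL, NULL)) > 0) {
+//     forwarder->ProcessEvents(read_fds, write_fds);
+//   }
+//   if (forwarder->IsClosed()) {
+//     // Both directions are done; the Forwarder can be deleted.
+//   }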
+class Forwarder {
+ public:
+ Forwarder(scoped_ptr<Socket> socket1, scoped_ptr<Socket> socket2);
+
+ ~Forwarder();
+
+ void RegisterFDs(fd_set* read_fds, fd_set* write_fds, int* max_fd);
+
+ void ProcessEvents(const fd_set& read_fds, const fd_set& write_fds);
+
+ bool IsClosed() const;
+
+ void Shutdown();
+
+ private:
+ class BufferedCopier;
+
+ base::ThreadChecker thread_checker_;
+ const scoped_ptr<Socket> socket1_;
+ const scoped_ptr<Socket> socket2_;
+ // Copies data from socket1 to socket2.
+ const scoped_ptr<BufferedCopier> buffer1_;
+ // Copies data from socket2 to socket1.
+ const scoped_ptr<BufferedCopier> buffer2_;
+};
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_FORWARDER_H_
diff --git a/tools/android/forwarder2/forwarders_manager.cc b/tools/android/forwarder2/forwarders_manager.cc
new file mode 100644
index 0000000..1795cb5
--- /dev/null
+++ b/tools/android/forwarder2/forwarders_manager.cc
@@ -0,0 +1,132 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/forwarders_manager.h"
+
+#include <sys/select.h>
+#include <unistd.h>
+
+#include <algorithm>
+
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop_proxy.h"
+#include "base/posix/eintr_wrapper.h"
+#include "tools/android/forwarder2/forwarder.h"
+#include "tools/android/forwarder2/socket.h"
+
+namespace forwarder2 {
+
+ForwardersManager::ForwardersManager() : thread_("ForwardersManagerThread") {
+ thread_.Start();
+ WaitForEventsOnInternalThreadSoon();
+}
+
+
+ForwardersManager::~ForwardersManager() {
+ deletion_notifier_.Notify();
+}
+
+void ForwardersManager::CreateAndStartNewForwarder(scoped_ptr<Socket> socket1,
+ scoped_ptr<Socket> socket2) {
+ // Note that the internal Forwarder vector is populated on the internal thread
+ // which is the only thread from which it's accessed.
+ thread_.message_loop_proxy()->PostTask(
+ FROM_HERE,
+ base::Bind(&ForwardersManager::CreateNewForwarderOnInternalThread,
+ base::Unretained(this), base::Passed(&socket1),
+ base::Passed(&socket2)));
+
+  // Interrupts the internal thread's select() so that the
+  // CreateNewForwarderOnInternalThread task posted above gets executed
+  // promptly.
+ wakeup_notifier_.Notify();
+}
+
+void ForwardersManager::CreateNewForwarderOnInternalThread(
+ scoped_ptr<Socket> socket1,
+ scoped_ptr<Socket> socket2) {
+ DCHECK(thread_.message_loop_proxy()->RunsTasksOnCurrentThread());
+ forwarders_.push_back(new Forwarder(socket1.Pass(), socket2.Pass()));
+}
+
+void ForwardersManager::WaitForEventsOnInternalThreadSoon() {
+ thread_.message_loop_proxy()->PostTask(
+ FROM_HERE,
+ base::Bind(&ForwardersManager::WaitForEventsOnInternalThread,
+ base::Unretained(this)));
+}
+
+void ForwardersManager::WaitForEventsOnInternalThread() {
+ DCHECK(thread_.message_loop_proxy()->RunsTasksOnCurrentThread());
+ fd_set read_fds;
+ fd_set write_fds;
+
+ FD_ZERO(&read_fds);
+ FD_ZERO(&write_fds);
+
+ // Populate the file descriptor sets.
+ int max_fd = -1;
+ for (ScopedVector<Forwarder>::iterator it = forwarders_.begin();
+ it != forwarders_.end(); ++it) {
+ Forwarder* const forwarder = *it;
+ forwarder->RegisterFDs(&read_fds, &write_fds, &max_fd);
+ }
+
+ const int notifier_fds[] = {
+ wakeup_notifier_.receiver_fd(),
+ deletion_notifier_.receiver_fd(),
+ };
+
+  for (size_t i = 0; i < arraysize(notifier_fds); ++i) {
+ const int notifier_fd = notifier_fds[i];
+ DCHECK_GT(notifier_fd, -1);
+ FD_SET(notifier_fd, &read_fds);
+ max_fd = std::max(max_fd, notifier_fd);
+ }
+
+ const int ret = HANDLE_EINTR(
+ select(max_fd + 1, &read_fds, &write_fds, NULL, NULL));
+ if (ret < 0) {
+ PLOG(ERROR) << "select";
+ return;
+ }
+
+ const bool must_shutdown = FD_ISSET(
+ deletion_notifier_.receiver_fd(), &read_fds);
+ if (must_shutdown && forwarders_.empty())
+ return;
+
+ base::ScopedClosureRunner wait_for_events_soon(
+ base::Bind(&ForwardersManager::WaitForEventsOnInternalThreadSoon,
+ base::Unretained(this)));
+
+ if (FD_ISSET(wakeup_notifier_.receiver_fd(), &read_fds)) {
+ // Note that the events on FDs other than the wakeup notifier one, if any,
+ // will be processed upon the next select().
+ wakeup_notifier_.Reset();
+ return;
+ }
+
+ // Notify the Forwarder instances and remove the ones that are closed.
+ for (size_t i = 0; i < forwarders_.size(); ) {
+ Forwarder* const forwarder = forwarders_[i];
+ forwarder->ProcessEvents(read_fds, write_fds);
+
+ if (must_shutdown)
+ forwarder->Shutdown();
+
+ if (!forwarder->IsClosed()) {
+ ++i;
+ continue;
+ }
+
+ std::swap(forwarders_[i], forwarders_.back());
+ forwarders_.pop_back();
+ }
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/forwarders_manager.h b/tools/android/forwarder2/forwarders_manager.h
new file mode 100644
index 0000000..4c6dea6
--- /dev/null
+++ b/tools/android/forwarder2/forwarders_manager.h
@@ -0,0 +1,45 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_FORWARDERS_MANAGER_H_
+#define TOOLS_ANDROID_FORWARDER2_FORWARDERS_MANAGER_H_
+
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/scoped_vector.h"
+#include "base/threading/thread.h"
+#include "tools/android/forwarder2/pipe_notifier.h"
+
+namespace forwarder2 {
+
+class Forwarder;
+class Socket;
+
+// Creates, owns and notifies Forwarder instances on its own internal thread.
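+//
+// Usage sketch (illustrative; this is how DeviceListener and HostController
+// hand over their data sockets):
+//
+//   ForwardersManager forwarders_manager_;
+//   ...
+//   // Transfers ownership of both endpoints; traffic is then forwarded
+//   // between them on the manager's internal thread until both sides close.
+//   forwarders_manager_.CreateAndStartNewForwarder(
+//       socket1.Pass(), socket2.Pass());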
+class ForwardersManager {
+ public:
+ ForwardersManager();
+
+ // Must be called on the thread the constructor was called on.
+ ~ForwardersManager();
+
+ // Can be called on any thread.
+ void CreateAndStartNewForwarder(scoped_ptr<Socket> socket1,
+ scoped_ptr<Socket> socket2);
+
+ private:
+ void CreateNewForwarderOnInternalThread(scoped_ptr<Socket> socket1,
+ scoped_ptr<Socket> socket2);
+
+ void WaitForEventsOnInternalThreadSoon();
+ void WaitForEventsOnInternalThread();
+
+ ScopedVector<Forwarder> forwarders_;
+ PipeNotifier deletion_notifier_;
+ PipeNotifier wakeup_notifier_;
+ base::Thread thread_;
+};
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_FORWARDERS_MANAGER_H_
diff --git a/tools/android/forwarder2/host_controller.cc b/tools/android/forwarder2/host_controller.cc
new file mode 100644
index 0000000..94e63ec
--- /dev/null
+++ b/tools/android/forwarder2/host_controller.cc
@@ -0,0 +1,170 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/host_controller.h"
+
+#include <string>
+
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/logging.h"
+#include "base/memory/scoped_ptr.h"
+#include "tools/android/forwarder2/command.h"
+#include "tools/android/forwarder2/forwarder.h"
+#include "tools/android/forwarder2/socket.h"
+
+namespace forwarder2 {
+
+// static
+scoped_ptr<HostController> HostController::Create(
+ int device_port,
+ int host_port,
+ int adb_port,
+ int exit_notifier_fd,
+ const ErrorCallback& error_callback) {
+ scoped_ptr<HostController> host_controller;
+ scoped_ptr<PipeNotifier> delete_controller_notifier(new PipeNotifier());
+ scoped_ptr<Socket> adb_control_socket(new Socket());
+ adb_control_socket->AddEventFd(exit_notifier_fd);
+ adb_control_socket->AddEventFd(delete_controller_notifier->receiver_fd());
+ if (!adb_control_socket->ConnectTcp(std::string(), adb_port)) {
+ LOG(ERROR) << "Could not connect HostController socket on port: "
+ << adb_port;
+ return host_controller.Pass();
+ }
+  // Send the command so that the device starts listening on |device_port|.
+ bool send_command_success = SendCommand(
+ command::LISTEN, device_port, adb_control_socket.get());
+ CHECK(send_command_success);
+ int device_port_allocated;
+ command::Type command;
+ if (!ReadCommand(
+ adb_control_socket.get(), &device_port_allocated, &command) ||
+ command != command::BIND_SUCCESS) {
+ LOG(ERROR) << "Device binding error using port " << device_port;
+ return host_controller.Pass();
+ }
+ host_controller.reset(
+ new HostController(
+ device_port_allocated, host_port, adb_port, exit_notifier_fd,
+ error_callback, adb_control_socket.Pass(),
+ delete_controller_notifier.Pass()));
+ return host_controller.Pass();
+}
+
+HostController::~HostController() {
+ DCHECK(deletion_task_runner_->RunsTasksOnCurrentThread());
+ delete_controller_notifier_->Notify();
+}
+
+void HostController::Start() {
+ thread_.Start();
+ ReadNextCommandSoon();
+}
+
+HostController::HostController(
+ int device_port,
+ int host_port,
+ int adb_port,
+ int exit_notifier_fd,
+ const ErrorCallback& error_callback,
+ scoped_ptr<Socket> adb_control_socket,
+ scoped_ptr<PipeNotifier> delete_controller_notifier)
+ : self_deleter_helper_(this, error_callback),
+ device_port_(device_port),
+ host_port_(host_port),
+ adb_port_(adb_port),
+ global_exit_notifier_fd_(exit_notifier_fd),
+ adb_control_socket_(adb_control_socket.Pass()),
+ delete_controller_notifier_(delete_controller_notifier.Pass()),
+ deletion_task_runner_(base::MessageLoopProxy::current()),
+ thread_("HostControllerThread") {
+}
+
+void HostController::ReadNextCommandSoon() {
+ thread_.message_loop_proxy()->PostTask(
+ FROM_HERE,
+ base::Bind(&HostController::ReadCommandOnInternalThread,
+ base::Unretained(this)));
+}
+
+void HostController::ReadCommandOnInternalThread() {
+ if (!ReceivedCommand(command::ACCEPT_SUCCESS, adb_control_socket_.get())) {
+ LOG(ERROR) << "Did not receive ACCEPT_SUCCESS for port: "
+ << host_port_;
+ OnInternalThreadError();
+ return;
+ }
+ // Try to connect to host server.
+ scoped_ptr<Socket> host_server_data_socket(new Socket());
+ if (!host_server_data_socket->ConnectTcp(std::string(), host_port_)) {
+ LOG(ERROR) << "Could not Connect HostServerData socket on port: "
+ << host_port_;
+ SendCommand(
+ command::HOST_SERVER_ERROR, device_port_, adb_control_socket_.get());
+ if (ReceivedCommand(command::ACK, adb_control_socket_.get())) {
+      // The forwarder can continue even though it could not connect to the
+      // host server, since the device acknowledged the error and can retry
+      // later.
+ ReadNextCommandSoon();
+ return;
+ }
+ OnInternalThreadError();
+ return;
+ }
+ LOG(INFO) << "Will send HOST_SERVER_SUCCESS: " << host_port_;
+ SendCommand(
+ command::HOST_SERVER_SUCCESS, device_port_, adb_control_socket_.get());
+ StartForwarder(host_server_data_socket.Pass());
+ ReadNextCommandSoon();
+}
+
+void HostController::StartForwarder(
+ scoped_ptr<Socket> host_server_data_socket) {
+ scoped_ptr<Socket> adb_data_socket(new Socket());
+ if (!adb_data_socket->ConnectTcp("", adb_port_)) {
+ LOG(ERROR) << "Could not connect AdbDataSocket on port: " << adb_port_;
+ OnInternalThreadError();
+ return;
+ }
+  // Open the Adb data connection, and send a command with |device_port_| as
+  // a way for the device to identify the connection.
+ SendCommand(command::DATA_CONNECTION, device_port_, adb_data_socket.get());
+
+ // Check that the device received the new Adb Data Connection. Note that this
+ // check is done through the |adb_control_socket_| that is handled in the
+ // DeviceListener thread just after the call to WaitForAdbDataSocket().
+ if (!ReceivedCommand(command::ADB_DATA_SOCKET_SUCCESS,
+ adb_control_socket_.get())) {
+ LOG(ERROR) << "Device could not handle the new Adb Data Connection.";
+ OnInternalThreadError();
+ return;
+ }
+ forwarders_manager_.CreateAndStartNewForwarder(
+ host_server_data_socket.Pass(), adb_data_socket.Pass());
+}
+
+void HostController::OnInternalThreadError() {
+ UnmapPortOnDevice();
+ self_deleter_helper_.MaybeSelfDeleteSoon();
+}
+
+void HostController::UnmapPortOnDevice() {
+ Socket socket;
+ if (!socket.ConnectTcp("", adb_port_)) {
+ LOG(ERROR) << "Could not connect to device on port " << adb_port_;
+ return;
+ }
+ if (!SendCommand(command::UNLISTEN, device_port_, &socket)) {
+ LOG(ERROR) << "Could not send unmap command for port " << device_port_;
+ return;
+ }
+ if (!ReceivedCommand(command::UNLISTEN_SUCCESS, &socket)) {
+ LOG(ERROR) << "Unamp command failed for port " << device_port_;
+ return;
+ }
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/host_controller.h b/tools/android/forwarder2/host_controller.h
new file mode 100644
index 0000000..d228bcc
--- /dev/null
+++ b/tools/android/forwarder2/host_controller.h
@@ -0,0 +1,96 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_HOST_CONTROLLER_H_
+#define TOOLS_ANDROID_FORWARDER2_HOST_CONTROLLER_H_
+
+#include <string>
+
+#include "base/basictypes.h"
+#include "base/callback.h"
+#include "base/compiler_specific.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "base/threading/thread.h"
+#include "tools/android/forwarder2/forwarders_manager.h"
+#include "tools/android/forwarder2/pipe_notifier.h"
+#include "tools/android/forwarder2/self_deleter_helper.h"
+#include "tools/android/forwarder2/socket.h"
+
+namespace forwarder2 {
+
+// This class partners with DeviceController and has the same lifetime and
+// threading characteristics as DeviceListener. In a nutshell, this class
+// operates on its own thread and is destroyed on the thread it was constructed
+// on. The class' deletion can happen in two different ways:
+// - Its destructor was called by its owner (HostControllersManager).
+// - Its internal thread requested self-deletion after an error happened. In
+// this case the owner (HostControllersManager) is notified on the
+// construction thread through the provided ErrorCallback invoked with the
+// HostController instance. When this callback is invoked, it's up to the
+// owner to delete the instance.
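+//
+// Ownership sketch (illustrative; it mirrors what HostControllersManager
+// does, with error handling omitted):
+//
+//   scoped_ptr<HostController> controller = HostController::Create(
+//       device_port, host_port, adb_port, exit_notifier_fd,
+//       base::Bind(&HostControllersManager::DeleteHostController,
+//                  weak_ptr_factory_.GetWeakPtr()));
+//   if (controller) {
+//     controller->Start();
+//     // The owner keeps |controller| (e.g. in a map keyed by adb and device
+//     // ports) and deletes it either directly or from the error callback.
+//   }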
+class HostController {
+ public:
+ // Callback used for self-deletion when an error happens so that the client
+ // can perform some cleanup work before deleting the HostController instance.
+ typedef base::Callback<void (scoped_ptr<HostController>)> ErrorCallback;
+
+ // If |device_port| is zero then a dynamic port is allocated (and retrievable
+ // through device_port() below).
+ static scoped_ptr<HostController> Create(int device_port,
+ int host_port,
+ int adb_port,
+ int exit_notifier_fd,
+ const ErrorCallback& error_callback);
+
+ ~HostController();
+
+ // Starts the internal controller thread.
+ void Start();
+
+ int adb_port() const { return adb_port_; }
+
+ int device_port() const { return device_port_; }
+
+ private:
+ HostController(int device_port,
+ int host_port,
+ int adb_port,
+ int exit_notifier_fd,
+ const ErrorCallback& error_callback,
+ scoped_ptr<Socket> adb_control_socket,
+ scoped_ptr<PipeNotifier> delete_controller_notifier);
+
+ void ReadNextCommandSoon();
+ void ReadCommandOnInternalThread();
+
+ void StartForwarder(scoped_ptr<Socket> host_server_data_socket);
+
+ // Note that this gets also called when ~HostController() is invoked.
+ void OnInternalThreadError();
+
+ void UnmapPortOnDevice();
+
+ SelfDeleterHelper<HostController> self_deleter_helper_;
+ const int device_port_;
+ const int host_port_;
+ const int adb_port_;
+ // Used to notify the controller when the process is killed.
+ const int global_exit_notifier_fd_;
+ scoped_ptr<Socket> adb_control_socket_;
+ // Used to cancel the pending blocking IO operations when the host controller
+ // instance is deleted.
+ scoped_ptr<PipeNotifier> delete_controller_notifier_;
+ // Task runner used for deletion set at deletion time (i.e. the object is
+ // deleted on the same thread it is created on).
+ const scoped_refptr<base::SingleThreadTaskRunner> deletion_task_runner_;
+ base::Thread thread_;
+ ForwardersManager forwarders_manager_;
+
+ DISALLOW_COPY_AND_ASSIGN(HostController);
+};
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_HOST_CONTROLLER_H_
diff --git a/tools/android/forwarder2/host_forwarder_main.cc b/tools/android/forwarder2/host_forwarder_main.cc
new file mode 100644
index 0000000..59571b6
--- /dev/null
+++ b/tools/android/forwarder2/host_forwarder_main.cc
@@ -0,0 +1,460 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <errno.h>
+#include <signal.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unistd.h>
+
+#include <cstdio>
+#include <iostream>
+#include <limits>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "base/at_exit.h"
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/compiler_specific.h"
+#include "base/containers/hash_tables.h"
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "base/memory/linked_ptr.h"
+#include "base/memory/scoped_vector.h"
+#include "base/memory/weak_ptr.h"
+#include "base/pickle.h"
+#include "base/safe_strerror_posix.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_piece.h"
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "base/strings/stringprintf.h"
+#include "base/task_runner.h"
+#include "base/threading/thread.h"
+#include "tools/android/forwarder2/common.h"
+#include "tools/android/forwarder2/daemon.h"
+#include "tools/android/forwarder2/host_controller.h"
+#include "tools/android/forwarder2/pipe_notifier.h"
+#include "tools/android/forwarder2/socket.h"
+#include "tools/android/forwarder2/util.h"
+
+namespace forwarder2 {
+namespace {
+
+const char kLogFilePath[] = "/tmp/host_forwarder_log";
+const char kDaemonIdentifier[] = "chrome_host_forwarder_daemon";
+
+const char kKillServerCommand[] = "kill-server";
+const char kForwardCommand[] = "forward";
+
+const int kBufSize = 256;
+
+// Needs to be global to be able to be accessed from the signal handler.
+PipeNotifier* g_notifier = NULL;
+
+// Lets the daemon fetch the exit notifier file descriptor.
+int GetExitNotifierFD() {
+ DCHECK(g_notifier);
+ return g_notifier->receiver_fd();
+}
+
+void KillHandler(int signal_number) {
+ char buf[kBufSize];
+ if (signal_number != SIGTERM && signal_number != SIGINT) {
+ snprintf(buf, sizeof(buf), "Ignoring unexpected signal %d.", signal_number);
+ SIGNAL_SAFE_LOG(WARNING, buf);
+ return;
+ }
+ snprintf(buf, sizeof(buf), "Received signal %d.", signal_number);
+ SIGNAL_SAFE_LOG(WARNING, buf);
+ static int s_kill_handler_count = 0;
+ CHECK(g_notifier);
+  // If for some reason the forwarder gets stuck waiting forever on a socket,
+  // sending SIGTERM or SIGINT three times forces it to exit (non-nicely).
+  // This is useful when debugging.
+ ++s_kill_handler_count;
+ if (!g_notifier->Notify() || s_kill_handler_count > 2)
+ exit(1);
+}
+
+// Manages HostController instances. There is one HostController instance for
+// each connection being forwarded. Note that forwarding can happen with many
+// devices (identified with a serial id).
+class HostControllersManager {
+ public:
+ HostControllersManager()
+ : weak_ptr_factory_(this),
+ controllers_(new HostControllerMap()),
+ has_failed_(false) {
+ }
+
+ ~HostControllersManager() {
+ if (!thread_.get())
+ return;
+ // Delete the controllers on the thread they were created on.
+ thread_->message_loop_proxy()->DeleteSoon(
+ FROM_HERE, controllers_.release());
+ }
+
+ void HandleRequest(const std::string& device_serial,
+ int device_port,
+ int host_port,
+ scoped_ptr<Socket> client_socket) {
+    // Lazily initialize so that the CLI process does not create this thread.
+ InitOnce();
+ thread_->message_loop_proxy()->PostTask(
+ FROM_HERE,
+ base::Bind(
+ &HostControllersManager::HandleRequestOnInternalThread,
+ base::Unretained(this), device_serial, device_port, host_port,
+ base::Passed(&client_socket)));
+ }
+
+ bool has_failed() const { return has_failed_; }
+
+ private:
+ typedef base::hash_map<
+ std::string, linked_ptr<HostController> > HostControllerMap;
+
+ static std::string MakeHostControllerMapKey(int adb_port, int device_port) {
+ return base::StringPrintf("%d:%d", adb_port, device_port);
+ }
+
+ void InitOnce() {
+ if (thread_.get())
+ return;
+ at_exit_manager_.reset(new base::AtExitManager());
+ thread_.reset(new base::Thread("HostControllersManagerThread"));
+ thread_->Start();
+ }
+
+ // Invoked when a HostController instance reports an error (e.g. due to a
+ // device connectivity issue). Note that this could be called after the
+ // controller manager was destroyed which is why a weak pointer is used.
+ static void DeleteHostController(
+ const base::WeakPtr<HostControllersManager>& manager_ptr,
+ scoped_ptr<HostController> host_controller) {
+ HostController* const controller = host_controller.release();
+ HostControllersManager* const manager = manager_ptr.get();
+ if (!manager) {
+ // Note that |controller| is not leaked in this case since the host
+ // controllers manager owns the controllers. If the manager was deleted
+ // then all the controllers (including |controller|) were also deleted.
+ return;
+ }
+ DCHECK(manager->thread_->message_loop_proxy()->RunsTasksOnCurrentThread());
+ // Note that this will delete |controller| which is owned by the map.
+ DeleteRefCountedValueInMap(
+ MakeHostControllerMapKey(
+ controller->adb_port(), controller->device_port()),
+ manager->controllers_.get());
+ }
+
+ void HandleRequestOnInternalThread(const std::string& device_serial,
+ int device_port,
+ int host_port,
+ scoped_ptr<Socket> client_socket) {
+ const int adb_port = GetAdbPortForDevice(device_serial);
+ if (adb_port < 0) {
+ SendMessage(
+ "ERROR: could not get adb port for device. You might need to add "
+ "'adb' to your PATH or provide the device serial id.",
+ client_socket.get());
+ return;
+ }
+ if (device_port < 0) {
+ // Remove the previously created host controller.
+ const std::string controller_key = MakeHostControllerMapKey(
+ adb_port, -device_port);
+ const bool controller_did_exist = DeleteRefCountedValueInMap(
+ controller_key, controllers_.get());
+ SendMessage(
+ !controller_did_exist ? "ERROR: could not unmap port" : "OK",
+ client_socket.get());
+
+ RemoveAdbPortForDeviceIfNeeded(device_serial);
+ return;
+ }
+ if (host_port < 0) {
+ SendMessage("ERROR: missing host port", client_socket.get());
+ return;
+ }
+ const bool use_dynamic_port_allocation = device_port == 0;
+ if (!use_dynamic_port_allocation) {
+ const std::string controller_key = MakeHostControllerMapKey(
+ adb_port, device_port);
+ if (controllers_->find(controller_key) != controllers_->end()) {
+ LOG(INFO) << "Already forwarding device port " << device_port
+ << " to host port " << host_port;
+ SendMessage(base::StringPrintf("%d:%d", device_port, host_port),
+ client_socket.get());
+ return;
+ }
+ }
+ // Create a new host controller.
+ scoped_ptr<HostController> host_controller(
+ HostController::Create(
+ device_port, host_port, adb_port, GetExitNotifierFD(),
+ base::Bind(&HostControllersManager::DeleteHostController,
+ weak_ptr_factory_.GetWeakPtr())));
+ if (!host_controller.get()) {
+ has_failed_ = true;
+ SendMessage("ERROR: Connection to device failed.", client_socket.get());
+ return;
+ }
+ // Get the current allocated port.
+ device_port = host_controller->device_port();
+ LOG(INFO) << "Forwarding device port " << device_port << " to host port "
+ << host_port;
+ const std::string msg = base::StringPrintf("%d:%d", device_port, host_port);
+ if (!SendMessage(msg, client_socket.get()))
+ return;
+ host_controller->Start();
+ controllers_->insert(
+ std::make_pair(MakeHostControllerMapKey(adb_port, device_port),
+ linked_ptr<HostController>(host_controller.release())));
+ }
+
+ void RemoveAdbPortForDeviceIfNeeded(const std::string& device_serial) {
+ base::hash_map<std::string, int>::const_iterator it =
+ device_serial_to_adb_port_map_.find(device_serial);
+ if (it == device_serial_to_adb_port_map_.end())
+ return;
+
+ int port = it->second;
+ const std::string prefix = base::StringPrintf("%d:", port);
+ for (HostControllerMap::const_iterator others = controllers_->begin();
+ others != controllers_->end(); ++others) {
+ if (others->first.find(prefix) == 0U)
+ return;
+ }
+ // No other port is being forwarded to this device:
+ // - Remove it from our internal serial -> adb port map.
+ // - Remove from "adb forward" command.
+ LOG(INFO) << "Device " << device_serial << " has no more ports.";
+ device_serial_to_adb_port_map_.erase(device_serial);
+ const std::string serial_part = device_serial.empty() ?
+ std::string() : std::string("-s ") + device_serial;
+ const std::string command = base::StringPrintf(
+ "adb %s forward --remove tcp:%d",
+ serial_part.c_str(),
+ port);
+ const int ret = system(command.c_str());
+ LOG(INFO) << command << " ret: " << ret;
+ // Wait for the socket to be fully unmapped.
+ const std::string port_mapped_cmd = base::StringPrintf(
+ "lsof -nPi:%d",
+ port);
+ const int poll_interval_us = 500 * 1000;
+ int retries = 3;
+ while (retries) {
+ const int port_unmapped = system(port_mapped_cmd.c_str());
+ LOG(INFO) << "Device " << device_serial << " port " << port << " unmap "
+ << port_unmapped;
+ if (port_unmapped)
+ break;
+ --retries;
+ usleep(poll_interval_us);
+ }
+ }
+
+ int GetAdbPortForDevice(const std::string& device_serial) {
+ base::hash_map<std::string, int>::const_iterator it =
+ device_serial_to_adb_port_map_.find(device_serial);
+ if (it != device_serial_to_adb_port_map_.end())
+ return it->second;
+ Socket bind_socket;
+ CHECK(bind_socket.BindTcp("127.0.0.1", 0));
+ const int port = bind_socket.GetPort();
+ bind_socket.Close();
+ const std::string serial_part = device_serial.empty() ?
+ std::string() : std::string("-s ") + device_serial;
+ const std::string command = base::StringPrintf(
+ "adb %s forward tcp:%d localabstract:chrome_device_forwarder",
+ serial_part.c_str(),
+ port);
+ LOG(INFO) << command;
+ const int ret = system(command.c_str());
+ if (ret < 0 || !WIFEXITED(ret) || WEXITSTATUS(ret) != 0)
+ return -1;
+ device_serial_to_adb_port_map_[device_serial] = port;
+ return port;
+ }
+
+ bool SendMessage(const std::string& msg, Socket* client_socket) {
+ bool result = client_socket->WriteString(msg);
+ DCHECK(result);
+ if (!result)
+ has_failed_ = true;
+ return result;
+ }
+
+ base::WeakPtrFactory<HostControllersManager> weak_ptr_factory_;
+ base::hash_map<std::string, int> device_serial_to_adb_port_map_;
+ scoped_ptr<HostControllerMap> controllers_;
+ bool has_failed_;
+ scoped_ptr<base::AtExitManager> at_exit_manager_; // Needed by base::Thread.
+ scoped_ptr<base::Thread> thread_;
+};
+
+class ServerDelegate : public Daemon::ServerDelegate {
+ public:
+ ServerDelegate() : has_failed_(false) {}
+
+ bool has_failed() const {
+ return has_failed_ || controllers_manager_.has_failed();
+ }
+
+ // Daemon::ServerDelegate:
+ virtual void Init() OVERRIDE {
+ LOG(INFO) << "Starting host process daemon (pid=" << getpid() << ")";
+ DCHECK(!g_notifier);
+ g_notifier = new PipeNotifier();
+ signal(SIGTERM, KillHandler);
+ signal(SIGINT, KillHandler);
+ }
+
+ virtual void OnClientConnected(scoped_ptr<Socket> client_socket) OVERRIDE {
+ char buf[kBufSize];
+ const int bytes_read = client_socket->Read(buf, sizeof(buf));
+ if (bytes_read <= 0) {
+ if (client_socket->DidReceiveEvent())
+ return;
+ PError("Read()");
+ has_failed_ = true;
+ return;
+ }
+ const Pickle command_pickle(buf, bytes_read);
+ PickleIterator pickle_it(command_pickle);
+ std::string device_serial;
+ CHECK(pickle_it.ReadString(&device_serial));
+ int device_port;
+ if (!pickle_it.ReadInt(&device_port)) {
+ client_socket->WriteString("ERROR: missing device port");
+ return;
+ }
+ int host_port;
+ if (!pickle_it.ReadInt(&host_port))
+ host_port = -1;
+ controllers_manager_.HandleRequest(
+ device_serial, device_port, host_port, client_socket.Pass());
+ }
+
+ private:
+ bool has_failed_;
+ HostControllersManager controllers_manager_;
+
+ DISALLOW_COPY_AND_ASSIGN(ServerDelegate);
+};
+
+class ClientDelegate : public Daemon::ClientDelegate {
+ public:
+ ClientDelegate(const Pickle& command_pickle)
+ : command_pickle_(command_pickle),
+ has_failed_(false) {
+ }
+
+ bool has_failed() const { return has_failed_; }
+
+ // Daemon::ClientDelegate:
+ virtual void OnDaemonReady(Socket* daemon_socket) OVERRIDE {
+ // Send the forward command to the daemon.
+ CHECK_EQ(command_pickle_.size(),
+ daemon_socket->WriteNumBytes(command_pickle_.data(),
+ command_pickle_.size()));
+ char buf[kBufSize];
+ const int bytes_read = daemon_socket->Read(
+ buf, sizeof(buf) - 1 /* leave space for null terminator */);
+ CHECK_GT(bytes_read, 0);
+ DCHECK(bytes_read < sizeof(buf));
+ buf[bytes_read] = 0;
+ base::StringPiece msg(buf, bytes_read);
+ if (msg.starts_with("ERROR")) {
+ LOG(ERROR) << msg;
+ has_failed_ = true;
+ return;
+ }
+ printf("%s\n", buf);
+ }
+
+ private:
+ const Pickle command_pickle_;
+ bool has_failed_;
+};
+
+void ExitWithUsage() {
+ std::cerr << "Usage: host_forwarder [options]\n\n"
+ "Options:\n"
+               " --serial-id=[0-9A-Z]{16}\n"
+ " --map DEVICE_PORT HOST_PORT\n"
+ " --unmap DEVICE_PORT\n"
+ " --kill-server\n";
+ exit(1);
+}
+
+int PortToInt(const std::string& s) {
+ int value;
+ // Note that 0 is a valid port (used for dynamic port allocation).
+ if (!base::StringToInt(s, &value) || value < 0 ||
+ value > std::numeric_limits<uint16>::max()) {
+ LOG(ERROR) << "Could not convert string " << s << " to port";
+ ExitWithUsage();
+ }
+ return value;
+}
+
+int RunHostForwarder(int argc, char** argv) {
+ CommandLine::Init(argc, argv);
+ const CommandLine& cmd_line = *CommandLine::ForCurrentProcess();
+ bool kill_server = false;
+
+ Pickle pickle;
+ pickle.WriteString(
+ cmd_line.HasSwitch("serial-id") ?
+ cmd_line.GetSwitchValueASCII("serial-id") : std::string());
+
+ const std::vector<std::string> args = cmd_line.GetArgs();
+ if (cmd_line.HasSwitch("kill-server")) {
+ kill_server = true;
+ } else if (cmd_line.HasSwitch("unmap")) {
+ if (args.size() != 1)
+ ExitWithUsage();
+ // Note the minus sign below.
+ pickle.WriteInt(-PortToInt(args[0]));
+ } else if (cmd_line.HasSwitch("map")) {
+ if (args.size() != 2)
+ ExitWithUsage();
+ pickle.WriteInt(PortToInt(args[0]));
+ pickle.WriteInt(PortToInt(args[1]));
+ } else {
+ ExitWithUsage();
+ }
+
+ if (kill_server && args.size() > 0)
+ ExitWithUsage();
+
+ ClientDelegate client_delegate(pickle);
+ ServerDelegate daemon_delegate;
+ Daemon daemon(
+ kLogFilePath, kDaemonIdentifier, &client_delegate, &daemon_delegate,
+ &GetExitNotifierFD);
+
+ if (kill_server)
+ return !daemon.Kill();
+ if (!daemon.SpawnIfNeeded())
+ return 1;
+
+ return client_delegate.has_failed() || daemon_delegate.has_failed();
+}
+
+} // namespace
+} // namespace forwarder2
+
+int main(int argc, char** argv) {
+ return forwarder2::RunHostForwarder(argc, argv);
+}
diff --git a/tools/android/forwarder2/pipe_notifier.cc b/tools/android/forwarder2/pipe_notifier.cc
new file mode 100644
index 0000000..02842bd
--- /dev/null
+++ b/tools/android/forwarder2/pipe_notifier.cc
@@ -0,0 +1,54 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/pipe_notifier.h"
+
+#include <fcntl.h>
+#include <unistd.h>
+#include <sys/socket.h>
+#include <sys/types.h>
+
+#include "base/logging.h"
+#include "base/posix/eintr_wrapper.h"
+#include "base/safe_strerror_posix.h"
+
+namespace forwarder2 {
+
+PipeNotifier::PipeNotifier() {
+ int pipe_fd[2];
+ int ret = pipe(pipe_fd);
+ CHECK_EQ(0, ret);
+ receiver_fd_ = pipe_fd[0];
+ sender_fd_ = pipe_fd[1];
+ fcntl(sender_fd_, F_SETFL, O_NONBLOCK);
+}
+
+PipeNotifier::~PipeNotifier() {
+ close(receiver_fd_);
+ close(sender_fd_);
+}
+
+bool PipeNotifier::Notify() {
+ CHECK_NE(-1, sender_fd_);
+ errno = 0;
+ int ret = HANDLE_EINTR(write(sender_fd_, "1", 1));
+ if (ret < 0) {
+ PLOG(ERROR) << "write";
+ return false;
+ }
+ return true;
+}
+
+void PipeNotifier::Reset() {
+ char c;
+ int ret = HANDLE_EINTR(read(receiver_fd_, &c, 1));
+ if (ret < 0) {
+ PLOG(ERROR) << "read";
+ return;
+ }
+ DCHECK_EQ(1, ret);
+ DCHECK_EQ('1', c);
+}
+
+} // namespace forwarder2
diff --git a/tools/android/forwarder2/pipe_notifier.h b/tools/android/forwarder2/pipe_notifier.h
new file mode 100644
index 0000000..aadb269
--- /dev/null
+++ b/tools/android/forwarder2/pipe_notifier.h
@@ -0,0 +1,37 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_PIPE_NOTIFIER_H_
+#define TOOLS_ANDROID_FORWARDER2_PIPE_NOTIFIER_H_
+
+#include "base/basictypes.h"
+
+namespace forwarder2 {
+
+// Helper class used to create a unix pipe that sends notifications to the
+// |receiver_fd_| file descriptor when Notify() is called. This is typically
+// used by the main thread to notify other threads that they must exit.
+// The |receiver_fd_| can be put into a fd_set and used in a select together
+// with a socket waiting to accept or read.
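+//
+// Usage sketch (illustrative; see ForwardersManager for a real select() loop
+// that multiplexes a notifier with sockets):
+//
+//   PipeNotifier exit_notifier;
+//   ...
+//   fd_set read_fds;
+//   FD_ZERO(&read_fds);
+//   FD_SET(socket_fd, &read_fds);
+//   FD_SET(exit_notifier.receiver_fd(), &read_fds);
+//   const int max_fd = std::max(socket_fd, exit_notifier.receiver_fd());
+//   if (HANDLE_EINTR(select(max_fd + 1, &read_fds, NULL, NULL, NULL)) > 0 &&
+//       FD_ISSET(exit_notifier.receiver_fd(), &read_fds)) {
+//     // Another thread called Notify(): exit or clean up.
+//   }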
+class PipeNotifier {
+ public:
+ PipeNotifier();
+ ~PipeNotifier();
+
+ bool Notify();
+
+ int receiver_fd() const { return receiver_fd_; }
+
+ void Reset();
+
+ private:
+ int sender_fd_;
+ int receiver_fd_;
+
+ DISALLOW_COPY_AND_ASSIGN(PipeNotifier);
+};
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_PIPE_NOTIFIER_H_
diff --git a/tools/android/forwarder2/self_deleter_helper.h b/tools/android/forwarder2/self_deleter_helper.h
new file mode 100644
index 0000000..d96903d
--- /dev/null
+++ b/tools/android/forwarder2/self_deleter_helper.h
@@ -0,0 +1,141 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_SELF_DELETER_HELPER_H_
+#define TOOLS_ANDROID_FORWARDER2_SELF_DELETER_HELPER_H_
+
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "base/message_loop/message_loop_proxy.h"
+
+namespace base {
+
+class SingleThreadTaskRunner;
+
+} // namespace base
+
+namespace forwarder2 {
+
+// Helper template class to be used in the following case:
+// * T is the type of an object that implements some work through an internal
+// or worker thread.
+// * T wants the internal thread to invoke deletion of its own instance, on
+// the thread where the instance was created.
+//
+// To make this easier, do something like:
+// 1) Add a SelfDeleteHelper<T> member to your class T, and default-initialize
+// it in its constructor.
+// 2) In the internal thread, to trigger self-deletion, call the
+// MaybeDeleteSoon() method on this member.
+//
+// MaybeDeleteSoon() posts a task on the message loop where the T instance was
+// created to delete it. The task will be safely ignored if the instance is
+// otherwise deleted.
+//
+// Usage example:
+// class Object {
+// public:
+// typedef base::Callback<void (scoped_ptr<Object>)> ErrorCallback;
+//
+// Object(const ErrorCallback& error_callback)
+// : self_deleter_helper_(this, error_callback) {
+// }
+//
+// void StartWork() {
+// // Post a callback to DoSomethingOnWorkerThread() below to another
+// // thread.
+// }
+//
+// void DoSomethingOnWorkerThread() {
+// ...
+// if (error_happened)
+//       self_deleter_helper_.MaybeSelfDeleteSoon();
+// }
+//
+// private:
+//   SelfDeleterHelper<Object> self_deleter_helper_;
+// };
+//
+// class ObjectOwner {
+// public:
+// ObjectOwner()
+// : object_(new Object(base::Bind(&ObjectOwner::DeleteObjectOnError,
+// base::Unretained(this))) {
+// // To keep this example simple base::Unretained(this) is used above but
+// // note that in a real world scenario the client would have to make sure
+// // that the ObjectOwner instance is still alive when
+// // DeleteObjectOnError() gets called below. This can be achieved by
+// // using a WeakPtr<ObjectOwner> for instance.
+// }
+//
+// void StartWork() {
+// object_->StartWork();
+// }
+//
+// private:
+// void DeleteObjectOnError(scoped_ptr<Object> object) {
+// DCHECK(thread_checker_.CalledOnValidThread());
+//     DCHECK_EQ(object_.get(), object.get());
+// // Do some extra work with |object| before it gets deleted...
+// object_.reset();
+// ignore_result(object.release());
+// }
+//
+// base::ThreadChecker thread_checker_;
+// scoped_ptr<Object> object_;
+// };
+//
+template <typename T>
+class SelfDeleterHelper {
+ public:
+ typedef base::Callback<void (scoped_ptr<T>)> DeletionCallback;
+
+ SelfDeleterHelper(T* self_deleting_object,
+ const DeletionCallback& deletion_callback)
+ : construction_runner_(base::MessageLoopProxy::current()),
+ self_deleting_object_(self_deleting_object),
+ deletion_callback_(deletion_callback),
+ weak_ptr_factory_(this) {
+ }
+
+ ~SelfDeleterHelper() {
+ DCHECK(construction_runner_->RunsTasksOnCurrentThread());
+ }
+
+ void MaybeSelfDeleteSoon() {
+ DCHECK(!construction_runner_->RunsTasksOnCurrentThread());
+ construction_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(&SelfDeleterHelper::SelfDelete,
+ weak_ptr_factory_.GetWeakPtr()));
+ }
+
+ private:
+ void SelfDelete() {
+ DCHECK(construction_runner_->RunsTasksOnCurrentThread());
+ deletion_callback_.Run(make_scoped_ptr(self_deleting_object_));
+ }
+
+ const scoped_refptr<base::SingleThreadTaskRunner> construction_runner_;
+ T* const self_deleting_object_;
+ const DeletionCallback deletion_callback_;
+
+  // WeakPtrFactory's documentation says:
+  //   Member variables should appear before the WeakPtrFactory, to ensure
+  //   that any WeakPtrs to this class are invalidated before its member
+  //   variables' destructors are executed, rendering them invalid.
+ base::WeakPtrFactory<SelfDeleterHelper<T> > weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(SelfDeleterHelper);
+};
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_SELF_DELETER_HELPER_H_
diff --git a/tools/android/forwarder2/socket.cc b/tools/android/forwarder2/socket.cc
new file mode 100644
index 0000000..9feac84
--- /dev/null
+++ b/tools/android/forwarder2/socket.cc
@@ -0,0 +1,448 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/android/forwarder2/socket.h"
+
+#include <arpa/inet.h>
+#include <fcntl.h>
+#include <netdb.h>
+#include <netinet/in.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/socket.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "base/logging.h"
+#include "base/posix/eintr_wrapper.h"
+#include "base/safe_strerror_posix.h"
+#include "tools/android/common/net.h"
+#include "tools/android/forwarder2/common.h"
+
+namespace {
+const int kNoTimeout = -1;
+const int kConnectTimeOut = 10; // Seconds.
+
+bool FamilyIsTCP(int family) {
+ return family == AF_INET || family == AF_INET6;
+}
+} // namespace
+
+namespace forwarder2 {
+
+bool Socket::BindUnix(const std::string& path) {
+ errno = 0;
+ if (!InitUnixSocket(path) || !BindAndListen()) {
+ Close();
+ return false;
+ }
+ return true;
+}
+
+bool Socket::BindTcp(const std::string& host, int port) {
+ errno = 0;
+ if (!InitTcpSocket(host, port) || !BindAndListen()) {
+ Close();
+ return false;
+ }
+ return true;
+}
+
+bool Socket::ConnectUnix(const std::string& path) {
+ errno = 0;
+ if (!InitUnixSocket(path) || !Connect()) {
+ Close();
+ return false;
+ }
+ return true;
+}
+
+bool Socket::ConnectTcp(const std::string& host, int port) {
+ errno = 0;
+ if (!InitTcpSocket(host, port) || !Connect()) {
+ Close();
+ return false;
+ }
+ return true;
+}
+
+Socket::Socket()
+ : socket_(-1),
+ port_(0),
+ socket_error_(false),
+ family_(AF_INET),
+ addr_ptr_(reinterpret_cast<sockaddr*>(&addr_.addr4)),
+ addr_len_(sizeof(sockaddr)) {
+ memset(&addr_, 0, sizeof(addr_));
+}
+
+Socket::~Socket() {
+ Close();
+}
+
+void Socket::Shutdown() {
+ if (!IsClosed()) {
+ PRESERVE_ERRNO_HANDLE_EINTR(shutdown(socket_, SHUT_RDWR));
+ }
+}
+
+void Socket::Close() {
+ if (!IsClosed()) {
+ CloseFD(socket_);
+ socket_ = -1;
+ }
+}
+
+bool Socket::InitSocketInternal() {
+ socket_ = socket(family_, SOCK_STREAM, 0);
+ if (socket_ < 0) {
+ PLOG(ERROR) << "socket";
+ return false;
+ }
+ tools::DisableNagle(socket_);
+ int reuse_addr = 1;
+ setsockopt(socket_, SOL_SOCKET, SO_REUSEADDR, &reuse_addr,
+ sizeof(reuse_addr));
+ if (!SetNonBlocking())
+ return false;
+ return true;
+}
+
+bool Socket::SetNonBlocking() {
+ const int flags = fcntl(socket_, F_GETFL);
+ if (flags < 0) {
+ PLOG(ERROR) << "fcntl";
+ return false;
+ }
+ if (flags & O_NONBLOCK)
+ return true;
+ if (fcntl(socket_, F_SETFL, flags | O_NONBLOCK) < 0) {
+ PLOG(ERROR) << "fcntl";
+ return false;
+ }
+ return true;
+}
+
+bool Socket::InitUnixSocket(const std::string& path) {
+ static const size_t kPathMax = sizeof(addr_.addr_un.sun_path);
+ // For abstract sockets we need one extra byte for the leading zero.
+ if (path.size() + 2 /* '\0' */ > kPathMax) {
+ LOG(ERROR) << "The provided path is too big to create a unix "
+ << "domain socket: " << path;
+ return false;
+ }
+ family_ = PF_UNIX;
+ addr_.addr_un.sun_family = family_;
+ // Copied from net/socket/unix_domain_socket_posix.cc
+  // Convert the given path into an abstract socket name. It must start with
+ // the '\0' character, so we are adding it. |addr_len| must specify the
+ // length of the structure exactly, as potentially the socket name may
+ // have '\0' characters embedded (although we don't support this).
+ // Note that addr_.addr_un.sun_path is already zero initialized.
+ memcpy(addr_.addr_un.sun_path + 1, path.c_str(), path.size());
+ addr_len_ = path.size() + offsetof(struct sockaddr_un, sun_path) + 1;
+ addr_ptr_ = reinterpret_cast<sockaddr*>(&addr_.addr_un);
+ return InitSocketInternal();
+}
+
+bool Socket::InitTcpSocket(const std::string& host, int port) {
+ port_ = port;
+ if (host.empty()) {
+ // Use localhost: INADDR_LOOPBACK
+ family_ = AF_INET;
+ addr_.addr4.sin_family = family_;
+ addr_.addr4.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
+ } else if (!Resolve(host)) {
+ return false;
+ }
+ CHECK(FamilyIsTCP(family_)) << "Invalid socket family.";
+ if (family_ == AF_INET) {
+ addr_.addr4.sin_port = htons(port_);
+ addr_ptr_ = reinterpret_cast<sockaddr*>(&addr_.addr4);
+ addr_len_ = sizeof(addr_.addr4);
+ } else if (family_ == AF_INET6) {
+ addr_.addr6.sin6_port = htons(port_);
+ addr_ptr_ = reinterpret_cast<sockaddr*>(&addr_.addr6);
+ addr_len_ = sizeof(addr_.addr6);
+ }
+ return InitSocketInternal();
+}
+
+bool Socket::BindAndListen() {
+ errno = 0;
+ if (HANDLE_EINTR(bind(socket_, addr_ptr_, addr_len_)) < 0 ||
+ HANDLE_EINTR(listen(socket_, SOMAXCONN)) < 0) {
+ PLOG(ERROR) << "bind/listen";
+ SetSocketError();
+ return false;
+ }
+ if (port_ == 0 && FamilyIsTCP(family_)) {
+ SockAddr addr;
+ memset(&addr, 0, sizeof(addr));
+ socklen_t addrlen = 0;
+ sockaddr* addr_ptr = NULL;
+ uint16* port_ptr = NULL;
+ if (family_ == AF_INET) {
+ addr_ptr = reinterpret_cast<sockaddr*>(&addr.addr4);
+ port_ptr = &addr.addr4.sin_port;
+ addrlen = sizeof(addr.addr4);
+ } else if (family_ == AF_INET6) {
+ addr_ptr = reinterpret_cast<sockaddr*>(&addr.addr6);
+ port_ptr = &addr.addr6.sin6_port;
+ addrlen = sizeof(addr.addr6);
+ }
+ errno = 0;
+ if (getsockname(socket_, addr_ptr, &addrlen) != 0) {
+ PLOG(ERROR) << "getsockname";
+ SetSocketError();
+ return false;
+ }
+ port_ = ntohs(*port_ptr);
+ }
+ return true;
+}
+
+bool Socket::Accept(Socket* new_socket) {
+ DCHECK(new_socket != NULL);
+ if (!WaitForEvent(READ, kNoTimeout)) {
+ SetSocketError();
+ return false;
+ }
+ errno = 0;
+ int new_socket_fd = HANDLE_EINTR(accept(socket_, NULL, NULL));
+ if (new_socket_fd < 0) {
+ SetSocketError();
+ return false;
+ }
+ tools::DisableNagle(new_socket_fd);
+ new_socket->socket_ = new_socket_fd;
+ if (!new_socket->SetNonBlocking())
+ return false;
+ return true;
+}
+
+bool Socket::Connect() {
+ DCHECK(fcntl(socket_, F_GETFL) & O_NONBLOCK);
+ errno = 0;
+ if (HANDLE_EINTR(connect(socket_, addr_ptr_, addr_len_)) < 0 &&
+ errno != EINPROGRESS) {
+ SetSocketError();
+ return false;
+ }
+ // Wait for connection to complete, or receive a notification.
+ if (!WaitForEvent(WRITE, kConnectTimeOut)) {
+ SetSocketError();
+ return false;
+ }
+ int socket_errno;
+ socklen_t opt_len = sizeof(socket_errno);
+ if (getsockopt(socket_, SOL_SOCKET, SO_ERROR, &socket_errno, &opt_len) < 0) {
+ PLOG(ERROR) << "getsockopt()";
+ SetSocketError();
+ return false;
+ }
+ if (socket_errno != 0) {
+ LOG(ERROR) << "Could not connect to host: " << safe_strerror(socket_errno);
+ SetSocketError();
+ return false;
+ }
+ return true;
+}
+
+bool Socket::Resolve(const std::string& host) {
+ struct addrinfo hints;
+ struct addrinfo* res;
+ memset(&hints, 0, sizeof(hints));
+ hints.ai_family = AF_UNSPEC;
+ hints.ai_socktype = SOCK_STREAM;
+ hints.ai_flags |= AI_CANONNAME;
+
+ int errcode = getaddrinfo(host.c_str(), NULL, &hints, &res);
+ if (errcode != 0) {
+ errno = 0;
+ SetSocketError();
+    // |res| is not valid when getaddrinfo() fails, so it must not be freed.
+ return false;
+ }
+ family_ = res->ai_family;
+ switch (res->ai_family) {
+ case AF_INET:
+ memcpy(&addr_.addr4,
+ reinterpret_cast<sockaddr_in*>(res->ai_addr),
+ sizeof(sockaddr_in));
+ break;
+ case AF_INET6:
+ memcpy(&addr_.addr6,
+ reinterpret_cast<sockaddr_in6*>(res->ai_addr),
+ sizeof(sockaddr_in6));
+ break;
+ }
+ freeaddrinfo(res);
+ return true;
+}
+
+int Socket::GetPort() {
+ if (!FamilyIsTCP(family_)) {
+ LOG(ERROR) << "Can't call GetPort() on an unix domain socket.";
+ return 0;
+ }
+ return port_;
+}
+
+int Socket::ReadNumBytes(void* buffer, size_t num_bytes) {
+ int bytes_read = 0;
+ int ret = 1;
+ while (bytes_read < num_bytes && ret > 0) {
+ ret = Read(static_cast<char*>(buffer) + bytes_read, num_bytes - bytes_read);
+ if (ret >= 0)
+ bytes_read += ret;
+ }
+ return bytes_read;
+}
+
+void Socket::SetSocketError() {
+ socket_error_ = true;
+ DCHECK_NE(EAGAIN, errno);
+ DCHECK_NE(EWOULDBLOCK, errno);
+ Close();
+}
+
+int Socket::Read(void* buffer, size_t buffer_size) {
+ if (!WaitForEvent(READ, kNoTimeout)) {
+ SetSocketError();
+ return 0;
+ }
+ int ret = HANDLE_EINTR(read(socket_, buffer, buffer_size));
+ if (ret < 0) {
+ PLOG(ERROR) << "read";
+ SetSocketError();
+ }
+ return ret;
+}
+
+int Socket::NonBlockingRead(void* buffer, size_t buffer_size) {
+ DCHECK(fcntl(socket_, F_GETFL) & O_NONBLOCK);
+ int ret = HANDLE_EINTR(read(socket_, buffer, buffer_size));
+ if (ret < 0) {
+ PLOG(ERROR) << "read";
+ SetSocketError();
+ }
+ return ret;
+}
+
+int Socket::Write(const void* buffer, size_t count) {
+ if (!WaitForEvent(WRITE, kNoTimeout)) {
+ SetSocketError();
+ return 0;
+ }
+ int ret = HANDLE_EINTR(send(socket_, buffer, count, MSG_NOSIGNAL));
+ if (ret < 0) {
+ PLOG(ERROR) << "send";
+ SetSocketError();
+ }
+ return ret;
+}
+
+int Socket::NonBlockingWrite(const void* buffer, size_t count) {
+ DCHECK(fcntl(socket_, F_GETFL) & O_NONBLOCK);
+ int ret = HANDLE_EINTR(send(socket_, buffer, count, MSG_NOSIGNAL));
+ if (ret < 0) {
+ PLOG(ERROR) << "send";
+ SetSocketError();
+ }
+ return ret;
+}
+
+int Socket::WriteString(const std::string& buffer) {
+ return WriteNumBytes(buffer.c_str(), buffer.size());
+}
+
+void Socket::AddEventFd(int event_fd) {
+ Event event;
+ event.fd = event_fd;
+ event.was_fired = false;
+ events_.push_back(event);
+}
+
+bool Socket::DidReceiveEventOnFd(int fd) const {
+ for (size_t i = 0; i < events_.size(); ++i)
+ if (events_[i].fd == fd)
+ return events_[i].was_fired;
+ return false;
+}
+
+bool Socket::DidReceiveEvent() const {
+ for (size_t i = 0; i < events_.size(); ++i)
+ if (events_[i].was_fired)
+ return true;
+ return false;
+}
+
+int Socket::WriteNumBytes(const void* buffer, size_t num_bytes) {
+ int bytes_written = 0;
+ int ret = 1;
+ while (bytes_written < num_bytes && ret > 0) {
+ ret = Write(static_cast<const char*>(buffer) + bytes_written,
+ num_bytes - bytes_written);
+ if (ret >= 0)
+ bytes_written += ret;
+ }
+ return bytes_written;
+}
+
+bool Socket::WaitForEvent(EventType type, int timeout_secs) {
+ if (socket_ == -1)
+ return true;
+ DCHECK(fcntl(socket_, F_GETFL) & O_NONBLOCK);
+ fd_set read_fds;
+ fd_set write_fds;
+ FD_ZERO(&read_fds);
+ FD_ZERO(&write_fds);
+ if (type == READ)
+ FD_SET(socket_, &read_fds);
+ else
+ FD_SET(socket_, &write_fds);
+ for (size_t i = 0; i < events_.size(); ++i)
+ FD_SET(events_[i].fd, &read_fds);
+ timeval tv = {};
+ timeval* tv_ptr = NULL;
+ if (timeout_secs > 0) {
+ tv.tv_sec = timeout_secs;
+ tv.tv_usec = 0;
+ tv_ptr = &tv;
+ }
+ int max_fd = socket_;
+ for (size_t i = 0; i < events_.size(); ++i)
+ if (events_[i].fd > max_fd)
+ max_fd = events_[i].fd;
+ if (HANDLE_EINTR(
+ select(max_fd + 1, &read_fds, &write_fds, NULL, tv_ptr)) <= 0) {
+ PLOG(ERROR) << "select";
+ return false;
+ }
+ bool event_was_fired = false;
+ for (size_t i = 0; i < events_.size(); ++i) {
+ if (FD_ISSET(events_[i].fd, &read_fds)) {
+ events_[i].was_fired = true;
+ event_was_fired = true;
+ }
+ }
+ return !event_was_fired;
+}
+
+// static
+pid_t Socket::GetUnixDomainSocketProcessOwner(const std::string& path) {
+ Socket socket;
+ if (!socket.ConnectUnix(path))
+ return -1;
+ ucred ucred;
+ socklen_t len = sizeof(ucred);
+ if (getsockopt(socket.socket_, SOL_SOCKET, SO_PEERCRED, &ucred, &len) == -1) {
+ CHECK_NE(ENOPROTOOPT, errno);
+ return -1;
+ }
+ return ucred.pid;
+}
+
+} // namespace forwarder2
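For orientation, a minimal sketch (not part of the patch) of how this wrapper is typically driven from client code; the endpoint and payload below are made-up values:

#include "tools/android/forwarder2/socket.h"

int main() {
  forwarder2::Socket socket;
  if (!socket.ConnectTcp("127.0.0.1", 8080))  // Assumed local test server.
    return 1;
  socket.WriteString("ping");  // The null terminator is not sent.
  char reply[4] = {};
  // Blocks (via WaitForEvent) until 4 bytes arrive or an error occurs.
  if (socket.ReadNumBytes(reply, 4) != 4 || socket.has_error())
    return 1;
  socket.Close();
  return 0;
}

Note that errors are sticky in this class: by the time has_error() returns true, SetSocketError() has already closed the underlying fd.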
diff --git a/tools/android/forwarder2/socket.h b/tools/android/forwarder2/socket.h
new file mode 100644
index 0000000..6047a1c
--- /dev/null
+++ b/tools/android/forwarder2/socket.h
@@ -0,0 +1,152 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_SOCKET_H_
+#define TOOLS_ANDROID_FORWARDER2_SOCKET_H_
+
+#include <fcntl.h>
+#include <netinet/in.h>
+#include <sys/socket.h>
+#include <sys/un.h>
+
+#include <string>
+#include <vector>
+
+#include "base/basictypes.h"
+
+namespace forwarder2 {
+
+// Wrapper class around the Unix socket API. Can be used to create, bind or
+// connect to both Unix domain sockets and TCP sockets.
+// TODO(pliard): Split this class into TCPSocket and UnixDomainSocket.
+class Socket {
+ public:
+ Socket();
+ ~Socket();
+
+ bool BindUnix(const std::string& path);
+ bool BindTcp(const std::string& host, int port);
+ bool ConnectUnix(const std::string& path);
+ bool ConnectTcp(const std::string& host, int port);
+
+ // Just a wrapper around unix socket shutdown(), see man 2 shutdown.
+ void Shutdown();
+
+ // Just a wrapper around unix socket close(), see man 2 close.
+ void Close();
+ bool IsClosed() const { return socket_ < 0; }
+
+ int fd() const { return socket_; }
+
+ bool Accept(Socket* new_socket);
+
+ // Returns the port allocated to this socket or zero on error.
+ int GetPort();
+
+ // Just a wrapper around unix read() function.
+  // Reads up to buffer_size bytes, but may read less than buffer_size.
+ // Returns the number of bytes read.
+ int Read(void* buffer, size_t buffer_size);
+
+ // Non-blocking version of Read() above. This must be called after a
+ // successful call to select(). The socket must also be in non-blocking mode
+ // before calling this method.
+ int NonBlockingRead(void* buffer, size_t buffer_size);
+
+ // Wrapper around send().
+ int Write(const void* buffer, size_t count);
+
+ // Same as NonBlockingRead() but for writing.
+ int NonBlockingWrite(const void* buffer, size_t count);
+
+  // Calls Read() multiple times until num_bytes have been read into the
+  // provided buffer. No bounds checking is performed.
+  // Returns the number of bytes read, which can differ from num_bytes in case
+  // of error.
+ int ReadNumBytes(void* buffer, size_t num_bytes);
+
+  // Calls Write() multiple times until num_bytes have been written. No bounds
+  // checking is performed. Returns the number of bytes written, which can
+  // differ from num_bytes in case of error.
+ int WriteNumBytes(const void* buffer, size_t num_bytes);
+
+ // Calls WriteNumBytes for the given std::string. Note that the null
+ // terminator is not written to the socket.
+ int WriteString(const std::string& buffer);
+
+ bool has_error() const { return socket_error_; }
+
+ // |event_fd| must be a valid pipe file descriptor created from the
+ // PipeNotifier and must live (not be closed) at least as long as this socket
+ // is alive.
+ void AddEventFd(int event_fd);
+
+ // Returns whether Accept() or Connect() was interrupted because the socket
+ // received an external event fired through the provided fd.
+ bool DidReceiveEventOnFd(int fd) const;
+
+ bool DidReceiveEvent() const;
+
+ static pid_t GetUnixDomainSocketProcessOwner(const std::string& path);
+
+ private:
+ enum EventType {
+ READ,
+ WRITE
+ };
+
+ union SockAddr {
+ // IPv4 sockaddr
+ sockaddr_in addr4;
+ // IPv6 sockaddr
+ sockaddr_in6 addr6;
+ // Unix Domain sockaddr
+ sockaddr_un addr_un;
+ };
+
+ struct Event {
+ int fd;
+ bool was_fired;
+ };
+
+ bool SetNonBlocking();
+
+ // If |host| is empty, use localhost.
+ bool InitTcpSocket(const std::string& host, int port);
+ bool InitUnixSocket(const std::string& path);
+ bool BindAndListen();
+ bool Connect();
+
+ bool Resolve(const std::string& host);
+ bool InitSocketInternal();
+ void SetSocketError();
+
+  // Waits until either the Socket or one of the fds registered through
+  // AddEventFd() has received an event.
+ bool WaitForEvent(EventType type, int timeout_secs);
+
+ int socket_;
+ int port_;
+ bool socket_error_;
+
+ // Family of the socket (PF_INET, PF_INET6 or PF_UNIX).
+ int family_;
+
+ SockAddr addr_;
+
+ // Points to one of the members of the above union depending on the family.
+ sockaddr* addr_ptr_;
+ // Length of one of the members of the above union depending on the family.
+ socklen_t addr_len_;
+
+ // Used to listen for external events (e.g. process received a SIGTERM) while
+ // blocking on I/O operations.
+ std::vector<Event> events_;
+
+ DISALLOW_COPY_AND_ASSIGN(Socket);
+};
+
+}  // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_SOCKET_H_
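A sketch of the event-fd mechanism used by WaitForEvent(): the notifier fd below is assumed to be the read end of a pipe (e.g. from the PipeNotifier mentioned above) that another thread writes to on shutdown.

#include "tools/android/forwarder2/socket.h"

// Returns true if a client was accepted. Sets |*interrupted| when Accept()
// was woken up by |notifier_fd| instead of failing with a socket error.
bool AcceptOnce(int port, int notifier_fd, forwarder2::Socket* client,
                bool* interrupted) {
  forwarder2::Socket server;
  if (!server.BindTcp("", port))   // An empty host means localhost.
    return false;
  server.AddEventFd(notifier_fd);  // Lets WaitForEvent() watch the pipe too.
  if (!server.Accept(client)) {
    *interrupted = server.DidReceiveEventOnFd(notifier_fd);
    return false;
  }
  *interrupted = false;
  return true;
}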
diff --git a/tools/android/forwarder2/util.h b/tools/android/forwarder2/util.h
new file mode 100644
index 0000000..9947628
--- /dev/null
+++ b/tools/android/forwarder2/util.h
@@ -0,0 +1,36 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_FORWARDER2_UTIL_H_
+#define TOOLS_ANDROID_FORWARDER2_UTIL_H_
+
+#include "base/logging.h"
+
+namespace forwarder2 {
+
+// Safely deletes a ref-counted value from the provided map by unlinking the
+// entry from the map before releasing it, in case its destructor would access
+// the map. By definition, the object is only deleted if its refcount is 1 when
+// this function gets called. Returns whether the element could be found in the
+// map.
+template <typename Map, typename K>
+bool DeleteRefCountedValueInMap(const K& key, Map* map) {
+ const typename Map::iterator it = map->find(key);
+ if (it == map->end())
+ return false;
+ DeleteRefCountedValueInMapFromIterator(it, map);
+ return true;
+}
+
+// See DeleteRefCountedValueInMap() above.
+template <typename Map, typename Iterator>
+void DeleteRefCountedValueInMapFromIterator(Iterator it, Map* map) {
+ DCHECK(it != map->end());
+ const typename Map::value_type::second_type shared_ptr_copy = it->second;
+ map->erase(it);
+}
+
+} // namespace forwarder2
+
+#endif // TOOLS_ANDROID_FORWARDER2_UTIL_H_
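A hypothetical usage sketch; the Session type and the map are illustrative and not part of the patch:

#include <map>

#include "base/memory/ref_counted.h"
#include "tools/android/forwarder2/util.h"

// Illustrative ref-counted type; any scoped_refptr-compatible class works.
class Session : public base::RefCountedThreadSafe<Session> {
 private:
  friend class base::RefCountedThreadSafe<Session>;
  ~Session() {}
};

typedef std::map<int, scoped_refptr<Session> > SessionMap;

void RemoveSession(int port, SessionMap* sessions) {
  // The entry is erased from the map first; the object is destroyed only when
  // the local copy made inside the helper goes out of scope, so ~Session()
  // never observes a half-removed entry in |sessions|.
  forwarder2::DeleteRefCountedValueInMap(port, sessions);
}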
diff --git a/tools/android/heap_profiler/DEPS b/tools/android/heap_profiler/DEPS
new file mode 100644
index 0000000..458e541
--- /dev/null
+++ b/tools/android/heap_profiler/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+third_party/bsdtrees",
+]
diff --git a/tools/android/heap_profiler/heap_dump.c b/tools/android/heap_profiler/heap_dump.c
new file mode 100644
index 0000000..5d468da
--- /dev/null
+++ b/tools/android/heap_profiler/heap_dump.c
@@ -0,0 +1,350 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// The client dump tool for libheap_profiler. It attaches to a process (given
+// its pid) and dumps all the libheap_profiler tracking information in JSON.
+// The JSON output looks like this:
+// {
+// "total_allocated": 908748493, # Total bytes allocated and not freed.
+// "num_allocs": 37542, # Number of allocations.
+// "num_stacks": 3723, # Number of allocation call-sites.
+// "allocs": # Optional. Printed only with the -x arg.
+// {
+// "beef1234": {"l": 17, "f": 1, "s": "1a"},
+// ^ ^ ^ ^ Index of the corresponding entry in the
+// | | | next "stacks" section. Essentially a ref
+// | | | to the call site that created the alloc.
+// | | |
+// | | +-------> Flags (last arg of heap_profiler_alloc).
+// | +----------------> Length of the Alloc.
+// +-----------------------------> Start address of the Alloc (hex).
+// },
+// "stacks":
+// {
+// "1a": {"l": 17, "f": [1074792772, 1100849864, 1100850688, ...]},
+// ^ ^ ^
+// | | +-----> Stack frames (absolute virtual addresses).
+// | +--------------> Bytes allocated and not freed by the call site.
+// +---------------------> Index of the entry (as for "allocs" xref).
+// Indexes are hex and might not be monotonic.
+
+#include <errno.h>
+#include <fcntl.h>
+#include <inttypes.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <unistd.h>
+#include <sys/ptrace.h>
+#include <sys/stat.h>
+#include <sys/wait.h>
+
+#include "tools/android/heap_profiler/heap_profiler.h"
+
+
+static void lseek_abs(int fd, size_t off);
+static void read_proc_cmdline(char* cmdline, int size);
+static ssize_t read_safe(int fd, void* buf, size_t count);
+
+static int pid;
+
+
+static int dump_process_heap(
+ int mem_fd,
+ FILE* fmaps,
+ bool dump_also_allocs,
+ bool pedantic, // Enable pedantic consistency checks on memory counters.
+ char* comment) {
+ HeapStats stats;
+ time_t tm;
+ char cmdline[512];
+
+ tm = time(NULL);
+ read_proc_cmdline(cmdline, sizeof(cmdline));
+
+ // Look for the mmap which contains the HeapStats in the target process vmem.
+  // On Linux/Android, libheap_profiler explicitly mmaps /dev/zero. The
+ // region furthermore starts with a magic marker to disambiguate.
+ bool stats_mmap_found = false;
+ for (;;) {
+ char line[1024];
+ if (fgets(line, sizeof(line), fmaps) == NULL)
+ break;
+
+ uintptr_t start;
+ uintptr_t end;
+ char map_file[32];
+ int ret = sscanf(line, "%"SCNxPTR"-%"SCNxPTR" rw-p %*s %*s %*s %31s",
+ &start, &end, map_file);
+ const size_t size = end - start + 1;
+ if (ret != 3 || strcmp(map_file, "/dev/zero") != 0 || size < sizeof(stats))
+ continue;
+
+ // The mmap looks promising. Let's check for the magic marker.
+ lseek_abs(mem_fd, start);
+ ssize_t rsize = read_safe(mem_fd, &stats, sizeof(stats));
+
+ if (rsize == -1) {
+ perror("read");
+ return -1;
+ }
+
+ if (rsize < sizeof(stats))
+ continue;
+
+ if (stats.magic_start == HEAP_PROFILER_MAGIC_MARKER) {
+ stats_mmap_found = true;
+ break;
+ }
+ }
+
+ if (!stats_mmap_found) {
+ fprintf(stderr, "Could not find the HeapStats area. "
+ "It looks like libheap_profiler is not loaded.\n");
+ return -1;
+ }
+
+ // Print JSON-formatted output.
+ printf("{\n");
+ printf(" \"pid\": %d,\n", pid);
+ printf(" \"time\": %ld,\n", tm);
+ printf(" \"comment\": \"%s\",\n", comment);
+ printf(" \"cmdline\": \"%s\",\n", cmdline);
+ printf(" \"pagesize\": %d,\n", getpagesize());
+ printf(" \"total_allocated\": %zu,\n", stats.total_alloc_bytes);
+ printf(" \"num_allocs\": %"PRIu32",\n", stats.num_allocs);
+ printf(" \"num_stacks\": %"PRIu32",\n", stats.num_stack_traces);
+
+ uint32_t dbg_counted_allocs = 0;
+ size_t dbg_counted_total_alloc_bytes = 0;
+ bool prepend_trailing_comma = false; // JSON syntax, I hate you.
+ uint32_t i;
+
+ // Dump the optional allocation table.
+ if (dump_also_allocs) {
+ printf(" \"allocs\": {");
+ lseek_abs(mem_fd, (uintptr_t) stats.allocs);
+ for (i = 0; i < stats.max_allocs; ++i) {
+ Alloc alloc;
+ if (read_safe(mem_fd, &alloc, sizeof(alloc)) != sizeof(alloc)) {
+ fprintf(stderr, "ERROR: cannot read allocation table\n");
+ perror("read");
+ return -1;
+ }
+
+ // Skip empty (i.e. freed) entries.
+ if (alloc.start == 0 && alloc.end == 0)
+ continue;
+
+ if (alloc.end < alloc.start) {
+ fprintf(stderr, "ERROR: found inconsistent alloc.\n");
+ return -1;
+ }
+
+ size_t alloc_size = alloc.end - alloc.start + 1;
+ size_t stack_idx = (
+ (uintptr_t) alloc.st - (uintptr_t) stats.stack_traces) /
+ sizeof(StacktraceEntry);
+ dbg_counted_total_alloc_bytes += alloc_size;
+ ++dbg_counted_allocs;
+
+ if (prepend_trailing_comma)
+ printf(",");
+ prepend_trailing_comma = true;
+ printf("\"%"PRIxPTR"\": {\"l\": %zu, \"f\": %"PRIu32", \"s\": \"%zx\"}",
+ alloc.start, alloc_size, alloc.flags, stack_idx);
+ }
+ printf("},\n");
+
+ if (pedantic && dbg_counted_allocs != stats.num_allocs) {
+ fprintf(stderr,
+ "ERROR: inconsistent alloc count (%"PRIu32" vs %"PRIu32").\n",
+ dbg_counted_allocs, stats.num_allocs);
+ return -1;
+ }
+
+ if (pedantic && dbg_counted_total_alloc_bytes != stats.total_alloc_bytes) {
+ fprintf(stderr, "ERROR: inconsistent alloc totals (%zu vs %zu).\n",
+ dbg_counted_total_alloc_bytes, stats.total_alloc_bytes);
+ return -1;
+ }
+ }
+
+ // Dump the distinct stack traces.
+ printf(" \"stacks\": {");
+ prepend_trailing_comma = false;
+ dbg_counted_total_alloc_bytes = 0;
+ lseek_abs(mem_fd, (uintptr_t) stats.stack_traces);
+ for (i = 0; i < stats.max_stack_traces; ++i) {
+ StacktraceEntry st;
+ if (read_safe(mem_fd, &st, sizeof(st)) != sizeof(st)) {
+ fprintf(stderr, "ERROR: cannot read stack trace table\n");
+ perror("read");
+ return -1;
+ }
+
+ // Skip empty (i.e. freed) entries.
+ if (st.alloc_bytes == 0)
+ continue;
+
+ dbg_counted_total_alloc_bytes += st.alloc_bytes;
+
+ if (prepend_trailing_comma)
+ printf(",");
+ prepend_trailing_comma = true;
+
+ printf("\"%"PRIx32"\":{\"l\": %zu, \"f\": [", i, st.alloc_bytes);
+ size_t n = 0;
+ for (;;) {
+ printf("%" PRIuPTR, st.frames[n]);
+ ++n;
+ if (n == HEAP_PROFILER_MAX_DEPTH || st.frames[n] == 0)
+ break;
+ else
+ printf(",");
+ }
+ printf("]}");
+ }
+ printf("}\n}\n");
+
+ if (pedantic && dbg_counted_total_alloc_bytes != stats.total_alloc_bytes) {
+ fprintf(stderr, "ERROR: inconsistent stacks totals (%zu vs %zu).\n",
+ dbg_counted_total_alloc_bytes, stats.total_alloc_bytes);
+ return -1;
+ }
+
+ fflush(stdout);
+ return 0;
+}
+
+// Unfortunately lseek takes a *signed* offset, which is unsuitable for large
+// files like /proc/X/mem on 64-bit.
+static void lseek_abs(int fd, size_t off) {
+#define OFF_T_MAX ((off_t) ~(((uint64_t) 1) << (8 * sizeof(off_t) - 1)))
+ if (off <= OFF_T_MAX) {
+ lseek(fd, (off_t) off, SEEK_SET);
+ return;
+ }
+ lseek(fd, (off_t) OFF_T_MAX, SEEK_SET);
+ lseek(fd, (off_t) (off - OFF_T_MAX), SEEK_CUR);
+}
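For instance, with a 32-bit off_t (OFF_T_MAX == 0x7fffffff), an assumed target offset of 0xb6e00000 is reached by a SEEK_SET to 0x7fffffff followed by a SEEK_CUR of 0xb6e00000 - 0x7fffffff = 0x36e00001.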
+
+static ssize_t read_safe(int fd, void* buf, size_t count) {
+ ssize_t res;
+ size_t bytes_read = 0;
+ if (count < 0)
+ return -1;
+ do {
+ do {
+ res = read(fd, buf + bytes_read, count - bytes_read);
+ } while (res == -1 && errno == EINTR);
+ if (res <= 0)
+ break;
+ bytes_read += res;
+ } while (bytes_read < count);
+ return bytes_read ? bytes_read : res;
+}
+
+static int open_proc_mem_fd() {
+ char path[64];
+ snprintf(path, sizeof(path), "/proc/%d/mem", pid);
+ int mem_fd = open(path, O_RDONLY);
+ if (mem_fd < 0) {
+ fprintf(stderr, "Could not attach to target process virtual memory.\n");
+ perror("open");
+ }
+ return mem_fd;
+}
+
+static FILE* open_proc_maps() {
+ char path[64];
+ snprintf(path, sizeof(path), "/proc/%d/maps", pid);
+ FILE* fmaps = fopen(path, "r");
+ if (fmaps == NULL) {
+ fprintf(stderr, "Could not open %s.\n", path);
+ perror("fopen");
+ }
+ return fmaps;
+}
+
+static void read_proc_cmdline(char* cmdline, int size) {
+ char path[64];
+ snprintf(path, sizeof(path), "/proc/%d/cmdline", pid);
+ int cmdline_fd = open(path, O_RDONLY);
+ if (cmdline_fd < 0) {
+ fprintf(stderr, "Could not open %s.\n", path);
+ perror("open");
+ cmdline[0] = '\0';
+ return;
+ }
+ int length = read_safe(cmdline_fd, cmdline, size);
+ if (length < 0) {
+ fprintf(stderr, "Could not read %s.\n", path);
+ perror("read");
+ length = 0;
+ }
+ close(cmdline_fd);
+ cmdline[length] = '\0';
+}
+
+int main(int argc, char** argv) {
+ char c;
+ int ret = 0;
+ bool dump_also_allocs = false;
+ bool pedantic = true;
+ char comment[1024] = { '\0' };
+
+ while (((c = getopt(argc, argv, "xnc:")) & 0x80) == 0) {
+ switch (c) {
+ case 'x':
+ dump_also_allocs = true;
+ break;
+ case 'n':
+ pedantic = false;
+ break;
+ case 'c':
+ strlcpy(comment, optarg, sizeof(comment));
+ break;
+ }
+ }
+
+ if (optind >= argc) {
+ printf("Usage: %s [-n] [-x] [-c comment] pid\n"
+ " -n: Skip pedantic checks on dump consistency.\n"
+ " -x: Extended dump, includes individual allocations.\n"
+ " -c: Appends the given comment to the JSON dump.\n",
+ argv[0]);
+ return -1;
+ }
+
+ pid = atoi(argv[optind]);
+
+ if (ptrace(PTRACE_ATTACH, pid, NULL, NULL) == -1) {
+ perror("ptrace");
+ return -1;
+ }
+
+ // Wait for the process to actually freeze.
+ waitpid(pid, NULL, 0);
+
+ int mem_fd = open_proc_mem_fd();
+ if (mem_fd < 0)
+ ret = -1;
+
+ FILE* fmaps = open_proc_maps();
+ if (fmaps == NULL)
+ ret = -1;
+
+ if (ret == 0)
+ ret = dump_process_heap(mem_fd, fmaps, dump_also_allocs, pedantic, comment);
+
+ ptrace(PTRACE_DETACH, pid, NULL, NULL);
+
+ // Cleanup.
+ fflush(stdout);
+ close(mem_fd);
+ fclose(fmaps);
+ return ret;
+}
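To make the maps-scanning logic in dump_process_heap() concrete, here is a small stand-alone sketch that feeds a fabricated /proc/<pid>/maps line through the same sscanf() pattern; the addresses are made up:

#include <inttypes.h>
#include <stdio.h>
#include <string.h>

int main() {
  // Fabricated maps line of the shape the dumper looks for.
  const char line[] = "b6e00000-b6e10000 rw-p 00000000 00:04 1 /dev/zero\n";
  uintptr_t start = 0;
  uintptr_t end = 0;
  char map_file[32];
  int ret = sscanf(line, "%" SCNxPTR "-%" SCNxPTR " rw-p %*s %*s %*s %31s",
                   &start, &end, map_file);
  // Expect ret == 3, start == 0xb6e00000, end == 0xb6e10000, "/dev/zero".
  return (ret == 3 && strcmp(map_file, "/dev/zero") == 0) ? 0 : 1;
}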
diff --git a/tools/android/heap_profiler/heap_profiler.c b/tools/android/heap_profiler/heap_profiler.c
new file mode 100644
index 0000000..aef63ba
--- /dev/null
+++ b/tools/android/heap_profiler/heap_profiler.c
@@ -0,0 +1,397 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This is an OS-independent* module whose purpose is tracking allocations and
+// their call sites (stack traces). It is able to deal with hole punching
+// (read: munmap). Also, it has low overhead and its presence in the system is
+// barely noticeable, even when tracing *all* the processes.
+// This module does NOT know how to deal with stack unwinding. The caller must
+// do that and pass the addresses of the unwound stack.
+// * (Modulo three lines for mutexes.)
+//
+// Exposed API:
+// void heap_profiler_init(HeapStats*);
+// void heap_profiler_alloc(addr, size, stack_frames, depth, flags);
+// void heap_profiler_free(addr, size); (size == 0 means free entire region).
+//
+// The profiling information is tracked into two data structures:
+// 1) An RB-Tree of non-overlapping VM regions (allocs) sorted by their start
+// addr. Each entry tracks the start-end addresses and points to the stack
+// trace which created that allocation (see below).
+// 2) A (hash) table of stack traces. In general the #allocations >> #call sites
+// which create those allocations. In order to avoid duplicating the latter,
+// they are stored distinctly in this hash table and used by reference.
+//
+// / Process virtual address space \
+// +------+ +------+ +------+
+// |Alloc1| |Alloc2| |Alloc3| <- Allocs (a RB-Tree underneath)
+// +------+ +------+ +------+
+// Len: 12 Len: 4 Len: 4
+// | | | stack_traces
+// | | | +-----------+--------------+
+// | | | | Alloc tot | stack frames +
+// | | | +-----------+--------------+
+// +------------|-------------+------------> | 16 | 0x1234 .... |
+// | +-----------+--------------+
+// +--------------------------> | 4 | 0x5678 .... |
+// +-----------+--------------+
+// (A hash-table underneath)
+//
+// Final note: the memory for both 1) and 2) entries is carved out from two
+// static pools (i.e. stack_traces and allocs). The pools are treated as
+// an sbrk essentially, and are kept compact by reusing freed elements (hence
+// having a freelist for each of them).
+//
+// All the internal (static) functions here assume that the |lock| is held.
+
+#include <assert.h>
+#include <string.h>
+
+// Platform-dependent mutex boilerplate.
+#if defined(__linux__) || defined(__ANDROID__)
+#include <pthread.h>
+#define DEFINE_MUTEX(x) pthread_mutex_t x = PTHREAD_MUTEX_INITIALIZER
+#define LOCK_MUTEX(x) pthread_mutex_lock(&x)
+#define UNLOCK_MUTEX(x) pthread_mutex_unlock(&x)
+#else
+#error OS not supported.
+#endif
+
+#include "tools/android/heap_profiler/heap_profiler.h"
+
+
+static DEFINE_MUTEX(lock);
+
+// |stats| contains the global tracking metadata and is the entry point which
+// is read by the heap_dump tool.
+static HeapStats* stats;
+
+// +---------------------------------------------------------------------------+
+// + Stack traces hash-table +
+// +---------------------------------------------------------------------------+
+#define ST_ENTRIES_MAX (64 * 1024)
+#define ST_HASHTABLE_BUCKETS (64 * 1024) /* Must be a power of 2. */
+
+static StacktraceEntry stack_traces[ST_ENTRIES_MAX];
+static StacktraceEntry* stack_traces_freelist;
+static StacktraceEntry* stack_traces_ht[ST_HASHTABLE_BUCKETS];
+
+// Looks up a stack trace from the stack frames. Creates a new one if necessary.
+static StacktraceEntry* record_stacktrace(uintptr_t* frames, uint32_t depth) {
+ if (depth == 0)
+ return NULL;
+
+ if (depth > HEAP_PROFILER_MAX_DEPTH)
+ depth = HEAP_PROFILER_MAX_DEPTH;
+
+ uint32_t i;
+ uintptr_t hash = 0;
+ for (i = 0; i < depth; ++i)
+ hash = (hash << 1) ^ (frames[i]);
+ const uint32_t slot = hash & (ST_HASHTABLE_BUCKETS - 1);
+ StacktraceEntry* st = stack_traces_ht[slot];
+
+ // Look for an existing entry in the hash-table.
+ const size_t frames_length = depth * sizeof(uintptr_t);
+ while (st != NULL && st->hash != hash &&
+ memcmp(frames, st->frames, frames_length) != 0) {
+ st = st->next;
+ }
+
+ // If not found, create a new one from the stack_traces array and add it to
+ // the hash-table.
+ if (st == NULL) {
+ // Get a free element either from the freelist or from the pool.
+ if (stack_traces_freelist != NULL) {
+ st = stack_traces_freelist;
+ stack_traces_freelist = stack_traces_freelist->next;
+ } else if (stats->max_stack_traces < ST_ENTRIES_MAX) {
+ st = &stack_traces[stats->max_stack_traces];
+ ++stats->max_stack_traces;
+ } else {
+ return NULL;
+ }
+
+ memset(st, 0, sizeof(*st));
+ memcpy(st->frames, frames, frames_length);
+ st->hash = hash;
+ st->next = stack_traces_ht[slot];
+ stack_traces_ht[slot] = st;
+ ++stats->num_stack_traces;
+ }
+
+ return st;
+}
+
+// Frees up a stack trace and appends it to the corresponding freelist.
+static void free_stacktrace(StacktraceEntry* st) {
+ assert(st->alloc_bytes == 0);
+ const uint32_t slot = st->hash & (ST_HASHTABLE_BUCKETS - 1);
+
+ // The expected load factor of the hash-table is very low. Frees should be
+ // pretty rare. Hence don't bother with a doubly linked list, might cost more.
+ StacktraceEntry** prev = &stack_traces_ht[slot];
+ while (*prev != st)
+ prev = &((*prev)->next);
+
+ // Remove from the hash-table bucket.
+ assert(*prev == st);
+ *prev = st->next;
+
+ // Add to the freelist.
+ st->next = stack_traces_freelist;
+ stack_traces_freelist = st;
+ --stats->num_stack_traces;
+}
+
+// +---------------------------------------------------------------------------+
+// + Allocs RB-tree +
+// +---------------------------------------------------------------------------+
+#define ALLOCS_ENTRIES_MAX (256 * 1024)
+
+static Alloc allocs[ALLOCS_ENTRIES_MAX];
+static Alloc* allocs_freelist;
+static RB_HEAD(HeapEntriesTree, Alloc) allocs_tree =
+ RB_INITIALIZER(&allocs_tree);
+
+// Comparator used by the RB-Tree (mind the overflow, avoid arith on addresses).
+static int allocs_tree_cmp(Alloc *alloc_1, Alloc *alloc_2) {
+ if (alloc_1->start < alloc_2->start)
+ return -1;
+ if (alloc_1->start > alloc_2->start)
+ return 1;
+ return 0;
+}
+
+RB_PROTOTYPE(HeapEntriesTree, Alloc, rb_node, allocs_tree_cmp);
+RB_GENERATE(HeapEntriesTree, Alloc, rb_node, allocs_tree_cmp);
+
+// Allocates a new Alloc and inserts it in the tree.
+static Alloc* insert_alloc(
+ uintptr_t start, uintptr_t end, StacktraceEntry* st, uint32_t flags) {
+ Alloc* alloc = NULL;
+
+ // First of all, get a free element either from the freelist or from the pool.
+ if (allocs_freelist != NULL) {
+ alloc = allocs_freelist;
+ allocs_freelist = alloc->next_free;
+ } else if (stats->max_allocs < ALLOCS_ENTRIES_MAX) {
+ alloc = &allocs[stats->max_allocs];
+ ++stats->max_allocs;
+ } else {
+ return NULL; // OOM.
+ }
+
+ alloc->start = start;
+ alloc->end = end;
+ alloc->st = st;
+ alloc->flags = flags;
+ alloc->next_free = NULL;
+ RB_INSERT(HeapEntriesTree, &allocs_tree, alloc);
+ ++stats->num_allocs;
+ return alloc;
+}
+
+// Deletes all the allocs in the range [addr, addr+size[ dealing with partial
+// frees and hole punching. Note that in the general case this function might
+// need to deal with very unfortunate cases, as below:
+//
+// Alloc tree begin: [Alloc 1]----[Alloc 2]-------[Alloc 3][Alloc 4]---[Alloc 5]
+// Deletion range: [xxxxxxxxxxxxxxxxxxxx]
+// Alloc tree end: [Alloc 1]----[Al.2]----------------------[Al.4]---[Alloc 5]
+// Alloc3 has to be deleted and Alloc 2,4 shrunk.
+static uint32_t delete_allocs_in_range(void* addr, size_t size) {
+ uintptr_t del_start = (uintptr_t) addr;
+ uintptr_t del_end = del_start + size - 1;
+ uint32_t flags = 0;
+
+ Alloc* alloc = NULL;
+ Alloc* next_alloc = RB_ROOT(&allocs_tree);
+
+ // Lookup the first (by address) relevant Alloc to initiate the deletion walk.
+ // At the end of the loop next_alloc is either:
+ // - the closest alloc starting before (or exactly at) the start of the
+ // deletion range (i.e. addr == del_start).
+ // - the first alloc inside the deletion range.
+ // - the first alloc after the deletion range iff the range was already empty
+ // (in this case the next loop will just bail out doing nothing).
+ // - NULL: iff the entire tree is empty (as above).
+ while (next_alloc != NULL) {
+ alloc = next_alloc;
+ if (alloc->start > del_start) {
+ next_alloc = RB_LEFT(alloc, rb_node);
+ } else if (alloc->end < del_start) {
+ next_alloc = RB_RIGHT(alloc, rb_node);
+ } else { // alloc->start <= del_start && alloc->end >= del_start
+ break;
+ }
+ }
+
+  // Now scan the allocs linearly, deleting chunks (or possibly whole allocs)
+  // until passing the end of the deletion range.
+ next_alloc = alloc;
+ while (next_alloc != NULL) {
+ alloc = next_alloc;
+ next_alloc = RB_NEXT(HeapEntriesTree, &allocs_tree, alloc);
+
+ if (size != 0) {
+      // In the general case we stop once past the end of the deletion range.
+ if (alloc->start > del_end)
+ break;
+
+      // This deals with the case of the first Alloc lying before the range.
+ if (alloc->end < del_start)
+ continue;
+ } else {
+ // size == 0 is a special case. It means deleting only the alloc which
+ // starts exactly at |del_start| if any (for dealing with free(ptr)).
+ if (alloc->start > del_start)
+ break;
+ if (alloc->start < del_start)
+ continue;
+ del_end = alloc->end;
+ }
+
+    // At this point the Alloc must overlap (partially or completely) with
+ // the deletion range.
+ assert(!(alloc->start > del_end || alloc->end < del_start));
+
+ StacktraceEntry* st = alloc->st;
+ flags |= alloc->flags;
+ uintptr_t freed_bytes = 0; // Bytes freed in this cycle.
+
+ if (del_start <= alloc->start) {
+ if (del_end >= alloc->end) {
+        // Complete overlap. Delete the full Alloc. Note: the range might
+        // still overlap with the next allocs.
+ // Begin: ------[alloc.start alloc.end]-[next alloc]
+ // Del range: [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx]
+ // Result: ---------------------------------[next alloc]
+        // [next alloc] will be shrunk on the next iteration.
+ freed_bytes = alloc->end - alloc->start + 1;
+ RB_REMOVE(HeapEntriesTree, &allocs_tree, alloc);
+
+ // Clean-up, so heap_dump can tell this is a free entry and skip it.
+ alloc->start = alloc->end = 0;
+ alloc->st = NULL;
+
+ // Put in the freelist.
+ alloc->next_free = allocs_freelist;
+ allocs_freelist = alloc;
+ --stats->num_allocs;
+ } else {
+ // Partial overlap at beginning. Cut first part and shrink the alloc.
+ // Begin: ------[alloc.start alloc.end]-[next alloc]
+ // Del range: [xxxxxx]
+ // Result: ------------[start alloc.end]-[next alloc]
+ freed_bytes = del_end - alloc->start + 1;
+ alloc->start = del_end + 1;
+ // No need to update the tree even if we changed the key. The keys are
+ // still monotonic (because the ranges are guaranteed to not overlap).
+ }
+ } else {
+ if (del_end >= alloc->end) {
+        // Partial overlap at the end. Cut the last part and shrink the alloc.
+ // Begin: ------[alloc.start alloc.end]-[next alloc]
+ // Del range: [xxxxxxxx]
+ // Result: ------[alloc.start alloc.end]-----[next alloc]
+        // [next alloc] will be shrunk on the next iteration.
+ freed_bytes = alloc->end - del_start + 1;
+ alloc->end = del_start - 1;
+ } else {
+ // Hole punching. Requires creating an extra alloc.
+ // Begin: ------[alloc.start alloc.end]-[next alloc]
+ // Del range: [xxx]
+ // Result: ------[ alloc 1 ]-----[ alloc 2 ]-[next alloc]
+ freed_bytes = del_end - del_start + 1;
+ const uintptr_t old_end = alloc->end;
+ alloc->end = del_start - 1;
+
+ // In case of OOM, don't count the 2nd alloc we failed to allocate.
+ if (insert_alloc(del_end + 1, old_end, st, alloc->flags) == NULL)
+ freed_bytes += (old_end - del_end);
+ }
+ }
+    // Now update the StacktraceEntry the Alloc was pointing to, possibly
+    // freeing it up.
+ assert(st->alloc_bytes >= freed_bytes);
+ st->alloc_bytes -= freed_bytes;
+ if (st->alloc_bytes == 0)
+ free_stacktrace(st);
+ stats->total_alloc_bytes -= freed_bytes;
+ }
+ return flags;
+}
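A hedged sketch of the hole-punching case above, expressed through the public API; all addresses and the fake frame are fabricated:

#include "tools/android/heap_profiler/heap_profiler.h"

void HolePunchExample(HeapStats* stats) {
  uintptr_t frame = 0x1234;  // Pretend stack frame.
  heap_profiler_init(stats);

  // One 4096-byte alloc covering [0x1000, 0x1fff].
  heap_profiler_alloc((void*)0x1000, 4096, &frame, 1, 0);

  // Punch a 256-byte hole in the middle: [0x1400, 0x14ff].
  heap_profiler_free((void*)0x1400, 256, NULL);

  // Now there are two allocs, [0x1000, 0x13ff] and [0x1500, 0x1fff],
  // and stats->total_alloc_bytes has dropped from 4096 to 3840.
  heap_profiler_cleanup();
}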
+
+// +---------------------------------------------------------------------------+
+// + Library entry points (refer to heap_profiler.h for API doc). +
+// +---------------------------------------------------------------------------+
+void heap_profiler_free(void* addr, size_t size, uint32_t* old_flags) {
+ assert(size == 0 || ((uintptr_t) addr + (size - 1)) >= (uintptr_t) addr);
+
+ LOCK_MUTEX(lock);
+ uint32_t flags = delete_allocs_in_range(addr, size);
+ UNLOCK_MUTEX(lock);
+
+ if (old_flags != NULL)
+ *old_flags = flags;
+}
+
+void heap_profiler_alloc(void* addr, size_t size, uintptr_t* frames,
+ uint32_t depth, uint32_t flags) {
+ if (depth > HEAP_PROFILER_MAX_DEPTH)
+ depth = HEAP_PROFILER_MAX_DEPTH;
+
+  if (size == 0)  // Some apps call malloc(0); it happens, just ignore it.
+ return;
+
+ const uintptr_t start = (uintptr_t) addr;
+ const uintptr_t end = start + (size - 1);
+ assert(start <= end);
+
+ LOCK_MUTEX(lock);
+
+ delete_allocs_in_range(addr, size);
+
+ StacktraceEntry* st = record_stacktrace(frames, depth);
+ if (st != NULL) {
+ Alloc* alloc = insert_alloc(start, end, st, flags);
+ if (alloc != NULL) {
+ st->alloc_bytes += size;
+ stats->total_alloc_bytes += size;
+ }
+ }
+
+ UNLOCK_MUTEX(lock);
+}
+
+void heap_profiler_init(HeapStats* heap_stats) {
+ LOCK_MUTEX(lock);
+
+ assert(stats == NULL);
+ stats = heap_stats;
+ memset(stats, 0, sizeof(HeapStats));
+ stats->magic_start = HEAP_PROFILER_MAGIC_MARKER;
+ stats->allocs = &allocs[0];
+ stats->stack_traces = &stack_traces[0];
+
+ UNLOCK_MUTEX(lock);
+}
+
+void heap_profiler_cleanup(void) {
+ LOCK_MUTEX(lock);
+
+ assert(stats != NULL);
+ memset(stack_traces, 0, sizeof(StacktraceEntry) * stats->max_stack_traces);
+ memset(stack_traces_ht, 0, sizeof(stack_traces_ht));
+ stack_traces_freelist = NULL;
+
+ memset(allocs, 0, sizeof(Alloc) * stats->max_allocs);
+ allocs_freelist = NULL;
+ RB_INIT(&allocs_tree);
+
+ stats = NULL;
+
+ UNLOCK_MUTEX(lock);
+}
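A minimal end-to-end sketch of the exposed API (it mirrors heap_profiler_unittest.cc further down in this patch; the addresses and frames are made up):

#include <assert.h>

#include "tools/android/heap_profiler/heap_profiler.h"

int main() {
  static HeapStats stats;
  heap_profiler_init(&stats);

  uintptr_t frames[2] = {0x1000, 0x1010};  // Pretend unwound call site.
  heap_profiler_alloc((void*)0x20000, 4096, frames, 2, 0);
  assert(stats.num_allocs == 1);
  assert(stats.total_alloc_bytes == 4096);

  heap_profiler_free((void*)0x20000, 0, NULL);  // size == 0: whole region.
  assert(stats.num_allocs == 0);
  assert(stats.total_alloc_bytes == 0);

  heap_profiler_cleanup();
  return 0;
}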
diff --git a/tools/android/heap_profiler/heap_profiler.gyp b/tools/android/heap_profiler/heap_profiler.gyp
new file mode 100644
index 0000000..50e6797
--- /dev/null
+++ b/tools/android/heap_profiler/heap_profiler.gyp
@@ -0,0 +1,75 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ # libheap_profiler is the library that will be preloaded in the Android
+ # Zygote and contains the black magic to hook malloc/mmap calls.
+ 'target_name': 'heap_profiler',
+ 'type': 'shared_library',
+ 'include_dirs': [ '../../..' ],
+ 'sources': [ 'heap_profiler_hooks_android.c' ],
+ 'dependencies': [ 'heap_profiler_core' ],
+ },
+ {
+ # heap_profiler_core contains only the tracking metadata code without any
+ # hooks. It is required by both the hprof library itself and the unittest.
+ 'target_name': 'heap_profiler_core',
+ 'type': 'static_library',
+ 'sources': [
+ 'heap_profiler.c',
+ 'heap_profiler.h',
+ ],
+ 'include_dirs': [ '../../..' ],
+ },
+ {
+ 'target_name': 'heap_dump',
+ 'type': 'executable',
+ 'sources': [ 'heap_dump.c' ],
+ 'include_dirs': [ '../../..' ],
+ },
+ {
+ 'target_name': 'heap_profiler_unittests',
+ 'type': '<(gtest_target_type)',
+ 'sources': [ 'heap_profiler_unittest.cc' ],
+ 'dependencies': [
+ 'heap_profiler_core',
+ '../../../testing/android/native_test.gyp:native_test_native_code',
+ '../../../testing/gtest.gyp:gtest',
+ '../../../testing/gtest.gyp:gtest_main',
+ ],
+ 'include_dirs': [ '../../..' ],
+ },
+ {
+ 'target_name': 'heap_profiler_unittests_apk',
+ 'type': 'none',
+ 'dependencies': [
+ 'heap_profiler_unittests',
+ ],
+ 'variables': {
+ 'test_suite_name': 'heap_profiler_unittests',
+ },
+ 'includes': [ '../../../build/apk_test.gypi' ],
+ },
+ {
+ 'target_name': 'heap_profiler_integrationtest',
+ 'type': 'executable',
+ 'sources': [ 'heap_profiler_integrationtest.cc' ],
+ 'dependencies': [ '../../../testing/gtest.gyp:gtest' ],
+ 'include_dirs': [ '../../..' ],
+ },
+ {
+ 'target_name': 'heap_profiler_integrationtest_stripped',
+ 'type': 'none',
+ 'dependencies': [ 'heap_profiler_integrationtest' ],
+ 'actions': [{
+ 'action_name': 'strip heap_profiler_integrationtest',
+ 'inputs': [ '<(PRODUCT_DIR)/heap_profiler_integrationtest' ],
+ 'outputs': [ '<(PRODUCT_DIR)/heap_profiler_integrationtest_stripped' ],
+ 'action': [ '<(android_strip)', '<@(_inputs)', '-o', '<@(_outputs)' ],
+ }],
+ },
+ ],
+}
diff --git a/tools/android/heap_profiler/heap_profiler.h b/tools/android/heap_profiler/heap_profiler.h
new file mode 100644
index 0000000..491081d
--- /dev/null
+++ b/tools/android/heap_profiler/heap_profiler.h
@@ -0,0 +1,90 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_ANDROID_HEAP_PROFILER_HEAP_PROFILER_H_
+#define TOOLS_ANDROID_HEAP_PROFILER_HEAP_PROFILER_H_
+
+#include <stdint.h>
+#include "third_party/bsdtrees/tree.h"
+
+#define HEAP_PROFILER_MAGIC_MARKER 0x42beef42L
+#define HEAP_PROFILER_MAX_DEPTH 12
+
+// The allocation is a result of a system malloc() invocation.
+#define HEAP_PROFILER_FLAGS_MALLOC 1
+
+// The allocation is a result of a mmap() invocation.
+#define HEAP_PROFILER_FLAGS_MMAP 2 // Allocation performed through mmap.
+
+// Only in the case of FLAGS_MMAP: The mmap is not anonymous (i.e. file backed).
+#define HEAP_PROFILER_FLAGS_MMAP_FILE 4
+
+// Android only: allocation made by the Zygote (before forking).
+#define HEAP_PROFILER_FLAGS_IN_ZYGOTE 8
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct StacktraceEntry {
+ uintptr_t frames[HEAP_PROFILER_MAX_DEPTH]; // Absolute addrs of stack frames.
+ uint32_t hash; // H(frames), used to keep these entries in a hashtable.
+
+ // Total number of bytes allocated through this code path. It is equal to the
+  // sum of the lengths of the Alloc instances whose .st == this.
+ size_t alloc_bytes;
+
+ // |next| has a dual purpose. When the entry is used (hence in the hashtable),
+ // this is a ptr to the next item in the same bucket. When the entry is free,
+ // this is a ptr to the next entry in the freelist.
+ struct StacktraceEntry* next;
+} StacktraceEntry;
+
+// Represents a contiguous range of virtual memory which has been allocated by
+// a given code path (identified by the corresponding StacktraceEntry).
+typedef struct Alloc {
+ RB_ENTRY(Alloc) rb_node; // Anchor for the RB-tree;
+ uintptr_t start;
+ uintptr_t end;
+ uint32_t flags; // See HEAP_PROFILER_FLAGS_*.
+ StacktraceEntry* st; // NULL == free entry.
+ struct Alloc* next_free;
+} Alloc;
+
+typedef struct {
+ uint32_t magic_start; // The magic marker used to locate the stats mmap.
+ uint32_t num_allocs; // The total number of allocation entries present.
+ uint32_t max_allocs; // The max number of items in |allocs|.
+ uint32_t num_stack_traces; // The total number of stack traces present.
+ uint32_t max_stack_traces; // The max number of items in |stack_traces|.
+ size_t total_alloc_bytes; // Total allocation bytes tracked.
+  Alloc* allocs;                 // Start of the Alloc pool.
+ StacktraceEntry* stack_traces; // Start of the StacktraceEntry pool.
+} HeapStats;
+
+// Initialize the heap_profiler. The caller has to allocate the HeapStats
+// "superblock", since the way it is mapped is platform-specific.
+void heap_profiler_init(HeapStats* heap_stats);
+
+// Records an allocation. The caller must unwind the stack and pass the
+// frames array. Flags are optional and don't affect the behavior of the
+// library (they're just kept along and dumped).
+void heap_profiler_alloc(void* addr,
+ size_t size,
+ uintptr_t* frames,
+ uint32_t depth,
+ uint32_t flags);
+
+// Frees any allocation (even partial) overlapping with the given range.
+// If old_flags != NULL, it will be filled with the flags of the deleted allocs.
+void heap_profiler_free(void* addr, size_t size, uint32_t* old_flags);
+
+// Cleans up the HeapStats and all the internal data structures.
+void heap_profiler_cleanup(void);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // TOOLS_ANDROID_HEAP_PROFILER_HEAP_PROFILER_H_
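A small sketch of how the flags above might be decoded, e.g. when post-processing a dump; the function name is made up:

#include <stdio.h>

#include "tools/android/heap_profiler/heap_profiler.h"

void PrintAllocFlags(uint32_t flags) {
  if (flags & HEAP_PROFILER_FLAGS_MALLOC)
    printf("malloc ");
  if (flags & HEAP_PROFILER_FLAGS_MMAP)
    printf("mmap ");
  if (flags & HEAP_PROFILER_FLAGS_MMAP_FILE)
    printf("file-backed ");
  if (flags & HEAP_PROFILER_FLAGS_IN_ZYGOTE)
    printf("zygote ");
  printf("\n");
}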
diff --git a/tools/android/heap_profiler/heap_profiler_hooks_android.c b/tools/android/heap_profiler/heap_profiler_hooks_android.c
new file mode 100644
index 0000000..1480780
--- /dev/null
+++ b/tools/android/heap_profiler/heap_profiler_hooks_android.c
@@ -0,0 +1,209 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <dlfcn.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdbool.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <unwind.h>
+
+#include "tools/android/heap_profiler/heap_profiler.h"
+
+#define HEAP_PROFILER_EXPORT __attribute__((visibility("default")))
+
+
+static inline __attribute__((always_inline))
+uint32_t get_backtrace(uintptr_t* frames, uint32_t max_depth);
+
+// Function pointers typedefs for the hooked symbols.
+typedef void* (*mmap_t)(void*, size_t, int, int, int, off_t);
+typedef void* (*mmap2_t)(void*, size_t, int, int, int, off_t);
+typedef void* (*mmap64_t)(void*, size_t, int, int, int, off64_t);
+typedef void* (*mremap_t)(void*, size_t, size_t, unsigned long);
+typedef int (*munmap_t)(void*, size_t);
+typedef void* (*malloc_t)(size_t);
+typedef void* (*calloc_t)(size_t, size_t);
+typedef void* (*realloc_t)(void*, size_t);
+typedef void (*free_t)(void*);
+
+// And their actual definitions.
+static mmap_t real_mmap;
+static mmap2_t real_mmap2;
+static mmap64_t real_mmap64;
+static mremap_t real_mremap;
+static munmap_t real_munmap;
+static malloc_t real_malloc;
+static calloc_t real_calloc;
+static realloc_t real_realloc;
+static free_t real_free;
+static int* has_forked_off_zygote;
+
+HEAP_PROFILER_EXPORT const HeapStats* heap_profiler_stats_for_tests;
+
+// +---------------------------------------------------------------------------+
+// + Initialization of heap_profiler and lookup of hooks' addresses +
+// +---------------------------------------------------------------------------+
+__attribute__((constructor))
+static void initialize() {
+ real_mmap = (mmap_t) dlsym(RTLD_NEXT, "mmap");
+  real_mmap2 = (mmap2_t) dlsym(RTLD_NEXT, "mmap2");
+ real_mmap64 = (mmap64_t) dlsym(RTLD_NEXT, "mmap64");
+ real_mremap = (mremap_t) dlsym(RTLD_NEXT, "mremap");
+ real_munmap = (munmap_t) dlsym(RTLD_NEXT, "munmap");
+ real_malloc = (malloc_t) dlsym(RTLD_NEXT, "malloc");
+ real_calloc = (calloc_t) dlsym(RTLD_NEXT, "calloc");
+ real_realloc = (realloc_t) dlsym(RTLD_NEXT, "realloc");
+ real_free = (free_t) dlsym(RTLD_NEXT, "free");
+
+ // gMallocLeakZygoteChild is an extra useful piece of information to have.
+  // When available, it tells whether we're still in the zygote (=0) or have
+  // forked a child off it (=1). In the worst case it will be NULL and we'll
+  // just ignore it.
+ has_forked_off_zygote = (int*) dlsym(RTLD_NEXT, "gMallocLeakZygoteChild");
+
+ // Allocate room for the HeapStats area and initialize the heap profiler.
+ // Make an explicit map of /dev/zero (instead of MAP_ANONYMOUS), so that the
+ // heap_dump tool can easily spot the mapping in the target process.
+ int fd = open("/dev/zero", O_RDONLY);
+ if (fd < 0) {
+ abort(); // This world has gone wrong. Good night Vienna.
+ }
+
+ HeapStats* stats = (HeapStats*) real_mmap(
+ 0, sizeof(HeapStats), PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
+ heap_profiler_stats_for_tests = stats;
+ heap_profiler_init(stats);
+}
+
+static inline __attribute__((always_inline)) void unwind_and_record_alloc(
+ void* start, size_t size, uint32_t flags) {
+ const int errno_save = errno;
+ uintptr_t frames[HEAP_PROFILER_MAX_DEPTH];
+ const uint32_t depth = get_backtrace(frames, HEAP_PROFILER_MAX_DEPTH);
+ if (has_forked_off_zygote != NULL && *has_forked_off_zygote == 0)
+ flags |= HEAP_PROFILER_FLAGS_IN_ZYGOTE;
+ heap_profiler_alloc(start, size, frames, depth, flags);
+ errno = errno_save;
+}
+
+static inline __attribute__((always_inline)) void discard_alloc(
+ void* start, size_t size, uint32_t* old_flags) {
+ const int errno_save = errno;
+ heap_profiler_free(start, size, old_flags);
+ errno = errno_save;
+}
+
+// Flags are non-functional extra decorators that are made available to the
+// final heap_dump tool, to get more details about the source of the allocation.
+static uint32_t get_flags_for_mmap(int fd) {
+ return HEAP_PROFILER_FLAGS_MMAP | (fd ? HEAP_PROFILER_FLAGS_MMAP_FILE : 0);
+}
+
+// +---------------------------------------------------------------------------+
+// + Actual mmap/malloc hooks +
+// +---------------------------------------------------------------------------+
+HEAP_PROFILER_EXPORT void* mmap(
+ void* addr, size_t size, int prot, int flags, int fd, off_t offset) {
+ void* ret = real_mmap(addr, size, prot, flags, fd, offset);
+ if (ret != MAP_FAILED)
+ unwind_and_record_alloc(ret, size, get_flags_for_mmap(fd));
+ return ret;
+}
+
+HEAP_PROFILER_EXPORT void* mmap2(
+ void* addr, size_t size, int prot, int flags, int fd, off_t pgoffset) {
+ void* ret = real_mmap2(addr, size, prot, flags, fd, pgoffset);
+ if (ret != MAP_FAILED)
+ unwind_and_record_alloc(ret, size, get_flags_for_mmap(fd));
+ return ret;
+}
+
+HEAP_PROFILER_EXPORT void* mmap64(
+ void* addr, size_t size, int prot, int flags, int fd, off64_t offset) {
+ void* ret = real_mmap64(addr, size, prot, flags, fd, offset);
+ if (ret != MAP_FAILED)
+ unwind_and_record_alloc(ret, size, get_flags_for_mmap(fd));
+ return ret;
+}
+
+HEAP_PROFILER_EXPORT void* mremap(
+ void* addr, size_t oldlen, size_t newlen, unsigned long flags) {
+ void* ret = real_mremap(addr, oldlen, newlen, flags);
+ if (ret != MAP_FAILED) {
+    // |old_flags| deliberately carries over the flags of the remapped region.
+    uint32_t old_flags = 0;
+    if (addr)
+      discard_alloc(addr, oldlen, &old_flags);
+    if (newlen > 0)
+      unwind_and_record_alloc(ret, newlen, old_flags);
+ }
+ return ret;
+}
+
+HEAP_PROFILER_EXPORT int munmap(void* ptr, size_t size) {
+ int ret = real_munmap(ptr, size);
+ discard_alloc(ptr, size, /*old_flags=*/NULL);
+ return ret;
+}
+
+HEAP_PROFILER_EXPORT void* malloc(size_t byte_count) {
+ void* ret = real_malloc(byte_count);
+ if (ret != NULL)
+ unwind_and_record_alloc(ret, byte_count, HEAP_PROFILER_FLAGS_MALLOC);
+ return ret;
+}
+
+HEAP_PROFILER_EXPORT void* calloc(size_t nmemb, size_t size) {
+ void* ret = real_calloc(nmemb, size);
+ if (ret != NULL)
+ unwind_and_record_alloc(ret, nmemb * size, HEAP_PROFILER_FLAGS_MALLOC);
+ return ret;
+}
+
+HEAP_PROFILER_EXPORT void* realloc(void* ptr, size_t size) {
+ void* ret = real_realloc(ptr, size);
+ uint32_t flags = 0;
+ if (ptr)
+ discard_alloc(ptr, 0, &flags);
+ if (ret != NULL)
+ unwind_and_record_alloc(ret, size, flags | HEAP_PROFILER_FLAGS_MALLOC);
+ return ret;
+}
+
+HEAP_PROFILER_EXPORT void free(void* ptr) {
+ real_free(ptr);
+ discard_alloc(ptr, 0, /*old_flags=*/NULL);
+}
+
+// +---------------------------------------------------------------------------+
+// + Stack unwinder +
+// +---------------------------------------------------------------------------+
+typedef struct {
+ uintptr_t* frames;
+ uint32_t frame_count;
+ uint32_t max_depth;
+ bool have_skipped_self;
+} stack_crawl_state_t;
+
+static _Unwind_Reason_Code unwind_fn(struct _Unwind_Context* ctx, void* arg) {
+ stack_crawl_state_t* state = (stack_crawl_state_t*) arg;
+ uintptr_t ip = _Unwind_GetIP(ctx);
+
+ if (ip != 0 && !state->have_skipped_self) {
+ state->have_skipped_self = true;
+ return _URC_NO_REASON;
+ }
+
+ state->frames[state->frame_count++] = ip;
+ return (state->frame_count >= state->max_depth) ?
+ _URC_END_OF_STACK : _URC_NO_REASON;
+}
+
+static uint32_t get_backtrace(uintptr_t* frames, uint32_t max_depth) {
+ stack_crawl_state_t state = {.frames = frames, .max_depth = max_depth};
+ _Unwind_Backtrace(unwind_fn, &state);
+ return state.frame_count;
+}
diff --git a/tools/android/heap_profiler/heap_profiler_integrationtest.cc b/tools/android/heap_profiler/heap_profiler_integrationtest.cc
new file mode 100644
index 0000000..ba7ebea
--- /dev/null
+++ b/tools/android/heap_profiler/heap_profiler_integrationtest.cc
@@ -0,0 +1,179 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <dlfcn.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <map>
+
+#include "base/compiler_specific.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/android/heap_profiler/heap_profiler.h"
+
+namespace {
+
+typedef void* (*AllocatorFn)(size_t);
+typedef int (*FreeFn)(void*, size_t);
+
+const size_t kSize1 = 499 * PAGE_SIZE;
+const size_t kSize2 = 503 * PAGE_SIZE;
+const size_t kSize3 = 509 * PAGE_SIZE;
+
+// The purpose of the four functions below is to create watermarked allocations,
+// so the test fixture can ascertain that the hooks work end-to-end.
+__attribute__((noinline)) void* MallocInner(size_t size) {
+ void* ptr = malloc(size);
+ // The memset below is to avoid tail-call elimination optimizations and ensure
+ // that this function will be part of the stack trace.
+ memset(ptr, 0, size);
+ return ptr;
+}
+
+__attribute__((noinline)) void* MallocOuter(size_t size) {
+ void* ptr = MallocInner(size);
+ memset(ptr, 0, size);
+ return ptr;
+}
+
+__attribute__((noinline)) void* DoMmap(size_t size) {
+ return mmap(
+ 0, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, 0, 0);
+}
+
+__attribute__((noinline)) void* MmapInner(size_t size) {
+ void* ptr = DoMmap(size);
+ memset(ptr, 0, size);
+ return ptr;
+}
+
+__attribute__((noinline)) void* MmapOuter(size_t size) {
+ void* ptr = MmapInner(size);
+ memset(ptr, 0, size);
+ return ptr;
+}
+
+const HeapStats* GetHeapStats() {
+ HeapStats* const* stats_ptr = reinterpret_cast<HeapStats* const*>(
+ dlsym(RTLD_DEFAULT, "heap_profiler_stats_for_tests"));
+ EXPECT_TRUE(stats_ptr != NULL);
+ const HeapStats* stats = *stats_ptr;
+ EXPECT_TRUE(stats != NULL);
+ EXPECT_EQ(HEAP_PROFILER_MAGIC_MARKER, stats->magic_start);
+ return stats;
+}
+
+bool StackTraceContains(const StacktraceEntry* s, AllocatorFn fn) {
+ // kExpectedFnLen is a gross estimation of the watermark functions' size.
+  // It tries to address the following problem: the addrs in the unwound
+  // stack frames will NOT point to the beginning of the functions, but to the
+ // PC after the call to malloc/mmap.
+ const size_t kExpectedFnLen = 16;
+ const uintptr_t fn_addr = reinterpret_cast<uintptr_t>(fn);
+ for (size_t i = 0; i < HEAP_PROFILER_MAX_DEPTH; ++i) {
+ if (s->frames[i] >= fn_addr && s->frames[i] <= fn_addr + kExpectedFnLen)
+ return true;
+ }
+ return false;
+}
+
+const StacktraceEntry* LookupStackTrace(size_t size, AllocatorFn fn) {
+ const HeapStats* stats = GetHeapStats();
+ for (size_t i = 0; i < stats->max_stack_traces; ++i) {
+ const StacktraceEntry* st = &stats->stack_traces[i];
+ if (st->alloc_bytes == size && StackTraceContains(st, fn))
+ return st;
+ }
+ return NULL;
+}
+
+int DoFree(void* addr, size_t /*size, ignored.*/) {
+ free(addr);
+ return 0;
+}
+
+void TestStackTracesWithParams(AllocatorFn outer_fn,
+ AllocatorFn inner_fn,
+ FreeFn free_fn) {
+ const HeapStats* stats = GetHeapStats();
+
+ void* m1 = outer_fn(kSize1);
+ void* m2 = inner_fn(kSize2);
+ void* m3 = inner_fn(kSize3);
+ free_fn(m3, kSize3);
+
+ const StacktraceEntry* st1 = LookupStackTrace(kSize1, inner_fn);
+ const StacktraceEntry* st2 = LookupStackTrace(kSize2, inner_fn);
+ const StacktraceEntry* st3 = LookupStackTrace(kSize3, inner_fn);
+
+ EXPECT_TRUE(st1 != NULL);
+ EXPECT_TRUE(StackTraceContains(st1, outer_fn));
+ EXPECT_TRUE(StackTraceContains(st1, inner_fn));
+
+ EXPECT_TRUE(st2 != NULL);
+ EXPECT_FALSE(StackTraceContains(st2, outer_fn));
+ EXPECT_TRUE(StackTraceContains(st2, inner_fn));
+
+ EXPECT_EQ(NULL, st3);
+
+ const size_t total_alloc_start = stats->total_alloc_bytes;
+ const size_t num_stack_traces_start = stats->num_stack_traces;
+
+ free_fn(m1, kSize1);
+ free_fn(m2, kSize2);
+
+ const size_t total_alloc_end = stats->total_alloc_bytes;
+ const size_t num_stack_traces_end = stats->num_stack_traces;
+
+ EXPECT_EQ(kSize1 + kSize2, total_alloc_start - total_alloc_end);
+ EXPECT_EQ(2, num_stack_traces_start - num_stack_traces_end);
+ EXPECT_EQ(NULL, LookupStackTrace(kSize1, inner_fn));
+ EXPECT_EQ(NULL, LookupStackTrace(kSize2, inner_fn));
+ EXPECT_EQ(NULL, LookupStackTrace(kSize3, inner_fn));
+}
+
+TEST(HeapProfilerIntegrationTest, TestMallocStackTraces) {
+ TestStackTracesWithParams(&MallocOuter, &MallocInner, &DoFree);
+}
+
+TEST(HeapProfilerIntegrationTest, TestMmapStackTraces) {
+ TestStackTracesWithParams(&MmapOuter, &MmapInner, &munmap);
+}
+
+// Returns the path of the directory containing the current executable.
+std::string GetExePath() {
+ char buf[1024];
+ ssize_t len = readlink("/proc/self/exe", buf, sizeof(buf) - 1);
+ if (len == -1)
+ return std::string();
+ std::string path(buf, len);
+ size_t sep = path.find_last_of('/');
+ if (sep == std::string::npos)
+ return std::string();
+ path.erase(sep);
+ return path;
+}
+
+} // namespace
+
+int main(int argc, char** argv) {
+ // Re-launch the process itself forcing the preload of the libheap_profiler.
+ char* ld_preload = getenv("LD_PRELOAD");
+ if (ld_preload == NULL || strstr(ld_preload, "libheap_profiler.so") == NULL) {
+ char env_ld_lib_path[256];
+ strlcpy(env_ld_lib_path, "LD_LIBRARY_PATH=", sizeof(env_ld_lib_path));
+ strlcat(env_ld_lib_path, GetExePath().c_str(), sizeof(env_ld_lib_path));
+ char env_ld_preload[] = "LD_PRELOAD=libheap_profiler.so";
+ char* const env[] = {env_ld_preload, env_ld_lib_path, 0};
+ execve("/proc/self/exe", argv, env);
+ // execve() never returns, unless something goes wrong.
+ perror("execve");
+ assert(false);
+ }
+
+ testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/tools/android/heap_profiler/heap_profiler_unittest.cc b/tools/android/heap_profiler/heap_profiler_unittest.cc
new file mode 100644
index 0000000..65c2700
--- /dev/null
+++ b/tools/android/heap_profiler/heap_profiler_unittest.cc
@@ -0,0 +1,458 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdint.h>
+#include <string.h>
+#include <map>
+
+#include "base/compiler_specific.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/android/heap_profiler/heap_profiler.h"
+
+namespace {
+
+class HeapProfilerTest : public testing::Test {
+ public:
+ virtual void SetUp() OVERRIDE { heap_profiler_init(&stats_); }
+
+ virtual void TearDown() OVERRIDE {
+    CheckAllocVsStacktraceConsistency();
+ heap_profiler_cleanup();
+ }
+
+ protected:
+ struct StackTrace {
+ uintptr_t frames[HEAP_PROFILER_MAX_DEPTH];
+ size_t depth;
+ };
+
+ StackTrace GenStackTrace(size_t depth, uintptr_t base) {
+ assert(depth <= HEAP_PROFILER_MAX_DEPTH);
+ StackTrace st;
+ for (size_t i = 0; i < depth; ++i)
+ st.frames[i] = base + i * 0x10UL;
+ st.depth = depth;
+ return st;
+ }
+
+ void ExpectAlloc(uintptr_t start,
+ uintptr_t end,
+ const StackTrace& st,
+ uint32_t flags) {
+ for (uint32_t i = 0; i < stats_.max_allocs; ++i) {
+ const Alloc& alloc = stats_.allocs[i];
+ if (alloc.start != start || alloc.end != end)
+ continue;
+      // Check that the stack traces match.
+ for (uint32_t j = 0; j < st.depth; ++j) {
+ EXPECT_EQ(st.frames[j], alloc.st->frames[j])
+ << "Stacktrace not matching @ depth " << j;
+ }
+ EXPECT_EQ(flags, alloc.flags);
+ return;
+ }
+
+ FAIL() << "Alloc not found [" << std::hex << start << "," << end << "]";
+ }
+
+  void CheckAllocVsStacktraceConsistency() {
+ uint32_t allocs_seen = 0;
+ uint32_t stack_traces_seen = 0;
+ std::map<StacktraceEntry*, uintptr_t> stacktrace_bytes_by_alloc;
+
+ for (uint32_t i = 0; i < stats_.max_allocs; ++i) {
+ Alloc* alloc = &stats_.allocs[i];
+ if (alloc->start == 0 && alloc->end == 0)
+ continue;
+ ++allocs_seen;
+ stacktrace_bytes_by_alloc[alloc->st] += alloc->end - alloc->start + 1;
+ }
+
+ for (uint32_t i = 0; i < stats_.max_stack_traces; ++i) {
+ StacktraceEntry* st = &stats_.stack_traces[i];
+ if (st->alloc_bytes == 0)
+ continue;
+ ++stack_traces_seen;
+ EXPECT_EQ(1, stacktrace_bytes_by_alloc.count(st));
+ EXPECT_EQ(stacktrace_bytes_by_alloc[st], st->alloc_bytes);
+ }
+
+ EXPECT_EQ(allocs_seen, stats_.num_allocs);
+ EXPECT_EQ(stack_traces_seen, stats_.num_stack_traces);
+ }
+
+ HeapStats stats_;
+};
+
+TEST_F(HeapProfilerTest, SimpleAlloc) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);
+
+ EXPECT_EQ(2, stats_.num_allocs);
+ EXPECT_EQ(1, stats_.num_stack_traces);
+ EXPECT_EQ(1024 + 2048, stats_.total_alloc_bytes);
+ ExpectAlloc(0x1000, 0x13ff, st1, 0);
+ ExpectAlloc(0x2000, 0x27ff, st1, 0);
+}
+
+TEST_F(HeapProfilerTest, AllocMultipleStacks) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(4, 0x1000);
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 2048, st2.frames, st2.depth, 0);
+ heap_profiler_alloc((void*)0x3000, 32, st1.frames, st1.depth, 0);
+
+ EXPECT_EQ(3, stats_.num_allocs);
+ EXPECT_EQ(2, stats_.num_stack_traces);
+ EXPECT_EQ(1024 + 2048 + 32, stats_.total_alloc_bytes);
+ ExpectAlloc(0x1000, 0x13ff, st1, 0);
+ ExpectAlloc(0x2000, 0x27ff, st2, 0);
+ ExpectAlloc(0x3000, 0x301f, st1, 0);
+}
+
+TEST_F(HeapProfilerTest, SimpleAllocAndFree) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+ heap_profiler_free((void*)0x1000, 1024, NULL);
+
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, Realloc) {
+ StackTrace st1 = GenStackTrace(8, 0);
+ heap_profiler_alloc((void*)0, 32, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0, 32, st1.frames, st1.depth, 0);
+}
+
+TEST_F(HeapProfilerTest, AllocAndFreeMultipleStacks) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x3000, 32, st2.frames, st2.depth, 0);
+ heap_profiler_alloc((void*)0x4000, 64, st2.frames, st2.depth, 0);
+
+ heap_profiler_free((void*)0x1000, 1024, NULL);
+ heap_profiler_free((void*)0x3000, 32, NULL);
+
+ EXPECT_EQ(2, stats_.num_allocs);
+ EXPECT_EQ(2, stats_.num_stack_traces);
+ EXPECT_EQ(2048 + 64, stats_.total_alloc_bytes);
+ ExpectAlloc(0x2000, 0x27ff, st1, 0);
+ ExpectAlloc(0x4000, 0x403f, st2, 0);
+}
+
+TEST_F(HeapProfilerTest, AllocAndFreeAll) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x3000, 32, st2.frames, st2.depth, 0);
+ heap_profiler_alloc((void*)0x4000, 64, st2.frames, st2.depth, 0);
+
+ heap_profiler_free((void*)0x1000, 1024, NULL);
+ heap_profiler_free((void*)0x2000, 2048, NULL);
+ heap_profiler_free((void*)0x3000, 32, NULL);
+ heap_profiler_free((void*)0x4000, 64, NULL);
+
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, AllocAndFreeWithZeroSize) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 2048, st2.frames, st2.depth, 0);
+
+ heap_profiler_free((void*)0x1000, 0, NULL);
+
+ EXPECT_EQ(1, stats_.num_allocs);
+ EXPECT_EQ(1, stats_.num_stack_traces);
+ EXPECT_EQ(2048, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, AllocAndFreeContiguous) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);
+
+ heap_profiler_free((void*)0x1000, 8192, NULL);
+
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, SparseAllocsOneLargeOuterFree) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+
+ heap_profiler_alloc((void*)0x1010, 1, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x1400, 2, st2.frames, st2.depth, 0);
+ heap_profiler_alloc((void*)0x1600, 5, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x9000, 4096, st2.frames, st2.depth, 0);
+
+ heap_profiler_free((void*)0x0800, 8192, NULL);
+ EXPECT_EQ(1, stats_.num_allocs);
+ EXPECT_EQ(1, stats_.num_stack_traces);
+ EXPECT_EQ(4096, stats_.total_alloc_bytes);
+ ExpectAlloc(0x9000, 0x9fff, st2, 0);
+}
+
+TEST_F(HeapProfilerTest, SparseAllocsOneLargePartiallyOverlappingFree) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+ StackTrace st3 = GenStackTrace(4, 0x2000);
+
+ // This will be untouched.
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+
+ // These will be partially freed in one shot, leaving a 64-byte "margin" at each end.
+ heap_profiler_alloc((void*)0x2000, 128, st2.frames, st2.depth, 0);
+ heap_profiler_alloc((void*)0x2400, 128, st2.frames, st2.depth, 0);
+ heap_profiler_alloc((void*)0x2f80, 128, st2.frames, st2.depth, 0);
+
+ // This will be untouched.
+ heap_profiler_alloc((void*)0x3000, 1024, st3.frames, st3.depth, 0);
+
+ heap_profiler_free((void*)0x2040, 4096 - 64 - 64, NULL);
+ EXPECT_EQ(4, stats_.num_allocs);
+ EXPECT_EQ(3, stats_.num_stack_traces);
+ EXPECT_EQ(1024 + 64 + 64 + 1024, stats_.total_alloc_bytes);
+
+ ExpectAlloc(0x1000, 0x13ff, st1, 0);
+ ExpectAlloc(0x2000, 0x203f, st2, 0);
+ ExpectAlloc(0x2fc0, 0x2fff, st2, 0);
+ ExpectAlloc(0x3000, 0x33ff, st3, 0);
+}
+
+TEST_F(HeapProfilerTest, AllocAndFreeScattered) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 4096, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x3000, 4096, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x4000, 4096, st1.frames, st1.depth, 0);
+
+ heap_profiler_free((void*)0x800, 4096, NULL);
+ EXPECT_EQ(4, stats_.num_allocs);
+ EXPECT_EQ(2048 + 4096 + 4096 + 4096, stats_.total_alloc_bytes);
+
+ heap_profiler_free((void*)0x1800, 4096, NULL);
+ EXPECT_EQ(3, stats_.num_allocs);
+ EXPECT_EQ(2048 + 4096 + 4096, stats_.total_alloc_bytes);
+
+ heap_profiler_free((void*)0x2800, 4096, NULL);
+ EXPECT_EQ(2, stats_.num_allocs);
+ EXPECT_EQ(2048 + 4096, stats_.total_alloc_bytes);
+
+ heap_profiler_free((void*)0x3800, 4096, NULL);
+ EXPECT_EQ(1, stats_.num_allocs);
+ EXPECT_EQ(2048, stats_.total_alloc_bytes);
+
+ heap_profiler_free((void*)0x4800, 4096, NULL);
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, AllocAndOverFreeContiguous) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);
+
+ heap_profiler_free((void*)0, 16834, NULL);
+
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, AllocContiguousAndPunchHole) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);
+
+ // Punch a 4k hole in the middle of the two contiguous 4k allocs.
+ heap_profiler_free((void*)0x1800, 4096, NULL);
+
+ EXPECT_EQ(2, stats_.num_allocs);
+ EXPECT_EQ(2, stats_.num_stack_traces);
+ EXPECT_EQ(4096, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, AllocAndPartialFree) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(6, 0x1000);
+ StackTrace st3 = GenStackTrace(7, 0x2000);
+ StackTrace st4 = GenStackTrace(9, 0x3000);
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+ heap_profiler_alloc((void*)0x2000, 1024, st2.frames, st2.depth, 0);
+ heap_profiler_alloc((void*)0x3000, 1024, st3.frames, st3.depth, 0);
+ heap_profiler_alloc((void*)0x4000, 1024, st4.frames, st4.depth, 0);
+
+ heap_profiler_free((void*)0x1000, 128, NULL); // Shrink left by 128B.
+ heap_profiler_free((void*)0x2380, 128, NULL); // Shrink right by 128B.
+ heap_profiler_free((void*)0x3100, 512, NULL); // 512B hole in the middle.
+ heap_profiler_free((void*)0x4000, 512, NULL); // Free up the 4th alloc...
+ heap_profiler_free((void*)0x4200, 512, NULL); // ...but do it in two halves.
+
+ EXPECT_EQ(4, stats_.num_allocs); // Allocs 1, 2 and the two halves of 3.
+ EXPECT_EQ(3, stats_.num_stack_traces); // st4 should be gone.
+ EXPECT_EQ(896 + 896 + 512, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, RandomIndividualAllocAndFrees) {
+ static const size_t NUM_ST = 128;
+ static const size_t NUM_OPS = 1000;
+
+ StackTrace st[NUM_ST];
+ for (uint32_t i = 0; i < NUM_ST; ++i)
+ st[i] = GenStackTrace((i % 10) + 2, i * 128);
+
+ for (size_t i = 0; i < NUM_OPS; ++i) {
+ uintptr_t start = ((i + 7) << 8) & (0xffffff);
+ size_t size = (start >> 16) & 0x0fff;
+ if (i & 1) {
+ StackTrace* s = &st[start % NUM_ST];
+ heap_profiler_alloc((void*)start, size, s->frames, s->depth, 0);
+ } else {
+ heap_profiler_free((void*)start, size, NULL);
+ }
+ CheckAllocVsStacktaceConsistency();
+ }
+}
+
+TEST_F(HeapProfilerTest, RandomAllocAndFreesBatches) {
+ static const size_t NUM_ST = 128;
+ static const size_t NUM_ALLOCS = 100;
+
+ StackTrace st[NUM_ST];
+ for (size_t i = 0; i < NUM_ST; ++i)
+ st[i] = GenStackTrace((i % 10) + 2, i * NUM_ST);
+
+ for (int repeat = 0; repeat < 5; ++repeat) {
+ for (size_t i = 0; i < NUM_ALLOCS; ++i) {
+ StackTrace* s = &st[i % NUM_ST];
+ heap_profiler_alloc(
+ (void*)(i * 4096), ((i + 1) * 32) % 4097, s->frames, s->depth, 0);
+ CheckAllocVsStacktaceConsistency();
+ }
+
+ for (size_t i = 0; i < NUM_ALLOCS; ++i) {
+ heap_profiler_free((void*)(i * 1024), ((i + 1) * 64) % 16000, NULL);
+ CheckAllocVsStacktaceConsistency();
+ }
+ }
+}
+
+TEST_F(HeapProfilerTest, UnwindStackTooLargeShouldSaturate) {
+ StackTrace st1 = GenStackTrace(HEAP_PROFILER_MAX_DEPTH, 0x0);
+ uintptr_t many_frames[100] = {};
+ memcpy(many_frames, st1.frames, sizeof(uintptr_t) * st1.depth);
+ heap_profiler_alloc((void*)0x1000, 1024, many_frames, 100, 0);
+ ExpectAlloc(0x1000, 0x13ff, st1, 0);
+}
+
+TEST_F(HeapProfilerTest, NoUnwindShouldNotCrashButNoop) {
+ heap_profiler_alloc((void*)0x1000, 1024, NULL, 0, 0);
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, FreeNonExisting) {
+ StackTrace st1 = GenStackTrace(5, 0x0);
+ heap_profiler_free((void*)0x1000, 1024, NULL);
+ heap_profiler_free((void*)0x1400, 1024, NULL);
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
+ EXPECT_EQ(1, stats_.num_allocs);
+ EXPECT_EQ(1024, stats_.total_alloc_bytes);
+}
+
+TEST_F(HeapProfilerTest, FlagsConsistency) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ uint32_t flags = 0;
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 42);
+ heap_profiler_alloc((void*)0x2000, 4096, st1.frames, st1.depth, 142);
+
+ ExpectAlloc(0x1000, 0x1fff, st1, 42);
+ ExpectAlloc(0x2000, 0x2fff, st1, 142);
+
+ // Punch a 4k hole in the middle of the two contiguous 4k allocs.
+ heap_profiler_free((void*)0x1800, 4096, NULL);
+
+ ExpectAlloc(0x1000, 0x17ff, st1, 42);
+ heap_profiler_free((void*)0x1000, 2048, &flags);
+ EXPECT_EQ(42, flags);
+
+ ExpectAlloc(0x2800, 0x2fff, st1, 142);
+ heap_profiler_free((void*)0x2800, 2048, &flags);
+ EXPECT_EQ(142, flags);
+}
+
+TEST_F(HeapProfilerTest, BeConsistentOnOOM) {
+ static const size_t NUM_ALLOCS = 512 * 1024;
+ uintptr_t frames[1];
+
+ for (uintptr_t i = 0; i < NUM_ALLOCS; ++i) {
+ frames[0] = i;
+ heap_profiler_alloc((void*)(i * 32), 32, frames, 1, 0);
+ }
+
+ CheckAllocVsStacktaceConsistency();
+ // Check that we're saturating, otherwise this entire test is pointless.
+ EXPECT_LT(stats_.num_allocs, NUM_ALLOCS);
+ EXPECT_LT(stats_.num_stack_traces, NUM_ALLOCS);
+
+ for (uintptr_t i = 0; i < NUM_ALLOCS; ++i)
+ heap_profiler_free((void*)(i * 32), 32, NULL);
+
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+}
+
+#ifdef __LP64__
+TEST_F(HeapProfilerTest, Test64Bit) {
+ StackTrace st1 = GenStackTrace(8, 0x0);
+ StackTrace st2 = GenStackTrace(10, 0x7fffffff70000000L);
+ StackTrace st3 = GenStackTrace(10, 0xffffffff70000000L);
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
+ heap_profiler_alloc(
+ (void*)0x7ffffffffffff000L, 4096, st2.frames, st2.depth, 0);
+ heap_profiler_alloc(
+ (void*)0xfffffffffffff000L, 4096, st3.frames, st3.depth, 0);
+ EXPECT_EQ(3, stats_.num_allocs);
+ EXPECT_EQ(3, stats_.num_stack_traces);
+ EXPECT_EQ(4096 + 4096 + 4096, stats_.total_alloc_bytes);
+
+ heap_profiler_free((void*)0x1000, 4096, NULL);
+ EXPECT_EQ(2, stats_.num_allocs);
+ EXPECT_EQ(2, stats_.num_stack_traces);
+ EXPECT_EQ(4096 + 4096, stats_.total_alloc_bytes);
+
+ heap_profiler_free((void*)0x7ffffffffffff000L, 4096, NULL);
+ EXPECT_EQ(1, stats_.num_allocs);
+ EXPECT_EQ(1, stats_.num_stack_traces);
+ EXPECT_EQ(4096, stats_.total_alloc_bytes);
+
+ heap_profiler_free((void*)0xfffffffffffff000L, 4096, NULL);
+ EXPECT_EQ(0, stats_.num_allocs);
+ EXPECT_EQ(0, stats_.num_stack_traces);
+ EXPECT_EQ(0, stats_.total_alloc_bytes);
+}
+#endif
+
+} // namespace
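
The tests above pin down the interval semantics of heap_profiler_alloc()/heap_profiler_free(): a free may remove an allocation outright, shrink it from either side, or punch a hole that splits it in two, and per-stack-trace byte totals must stay consistent throughout. The following is a minimal Python model of that splitting logic, an illustrative sketch only (not the profiler's real data structures; it ignores special cases such as zero-size frees, which the profiler treats like free(ptr)):

# Minimal model of the interval bookkeeping the tests above exercise.
class HeapModel(object):
    def __init__(self):
        self.allocs = {}  # start address -> (inclusive end address, stack trace id)

    def alloc(self, start, size, st_id):
        if size > 0:
            self.allocs[start] = (start + size - 1, st_id)

    def free(self, start, size):
        # Frees [start, start + size): may delete, shrink or split allocations.
        f_start, f_end = start, start + size - 1
        updated = {}
        for a_start, (a_end, st_id) in self.allocs.items():
            if a_end < f_start or a_start > f_end:
                updated[a_start] = (a_end, st_id)        # Untouched.
                continue
            if a_start < f_start:
                updated[a_start] = (f_start - 1, st_id)  # Left fragment survives.
            if a_end > f_end:
                updated[f_end + 1] = (a_end, st_id)      # Right fragment survives.
        self.allocs = updated

    def total_bytes(self):
        return sum(end - start + 1 for start, (end, _) in self.allocs.items())

# Mirrors HeapProfilerTest.AllocContiguousAndPunchHole.
h = HeapModel()
h.alloc(0x1000, 4096, 'st1')
h.alloc(0x2000, 4096, 'st2')
h.free(0x1800, 4096)  # Punches a 4k hole straddling both allocations.
assert len(h.allocs) == 2 and h.total_bytes() == 4096
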
diff --git a/tools/android/md5sum/BUILD.gn b/tools/android/md5sum/BUILD.gn
new file mode 100644
index 0000000..5f91a88
--- /dev/null
+++ b/tools/android/md5sum/BUILD.gn
@@ -0,0 +1,53 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# GYP: //tools/android/md5sum/md5sum.gyp:md5sum
+group("md5sum") {
+ datadeps = [
+ ":md5sum_bin($host_toolchain)",
+ ":md5sum_bin($default_toolchain)",
+ ":md5sum_prepare_dist($default_toolchain)",
+ ":md5sum_copy_host($host_toolchain)",
+ ]
+ # TODO(cjhopman): Remove once group datadeps are fixed.
+ deps = datadeps
+}
+
+# GYP: //tools/android/md5sum/md5sum.gyp:md5sum_bin_device (and md5sum_bin_host)
+executable("md5sum_bin") {
+ sources = [
+ "md5sum.cc"
+ ]
+ deps = [
+ "//base"
+ ]
+
+ # TODO(GYP)
+ #'conditions': [
+ #[ 'order_profiling!=0 and OS=="android"', {
+ #'dependencies': [ '../../../tools/cygprofile/cygprofile.gyp:cygprofile', ],
+ #}],
+ #],
+}
+
+if (current_toolchain == default_toolchain) {
+ import("//build/config/android/rules.gni")
+
+ # GYP: //tools/android/md5sum/md5sum.gyp:md5sum_stripped_device_bin
+ create_native_executable_dist("md5sum_prepare_dist") {
+ dist_dir = "$root_build_dir/md5sum_dist"
+ binary = "$root_build_dir/exe.stripped/md5sum_bin"
+ }
+} else {
+ # GYP: //tools/android/md5sum/md5sum.gyp:md5sum_bin_host
+ copy("md5sum_copy_host") {
+ sources = [
+ "$root_out_dir/md5sum_bin"
+ ]
+ outputs = [
+ "$root_build_dir/md5sum_bin_host"
+ ]
+ }
+}
+
diff --git a/tools/android/md5sum/md5sum.cc b/tools/android/md5sum/md5sum.cc
new file mode 100644
index 0000000..07ce2c2
--- /dev/null
+++ b/tools/android/md5sum/md5sum.cc
@@ -0,0 +1,93 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Md5sum implementation for Android. This version handles files as well as
+// directories. Its output is sorted by file path.
+
+#include <fstream>
+#include <iostream>
+#include <set>
+#include <string>
+
+#include "base/files/file_enumerator.h"
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "base/md5.h"
+
+namespace {
+
+const int kBufferSize = 1024;
+
+// Returns whether |path|'s MD5 was successfully written to |digest_string|.
+bool MD5Sum(const char* path, std::string* digest_string) {
+ std::ifstream stream(path);
+ if (!stream.good()) {
+ LOG(ERROR) << "Could not open file " << path;
+ return false;
+ }
+ base::MD5Context ctx;
+ base::MD5Init(&ctx);
+ char buf[kBufferSize];
+ while (stream.good()) {
+ std::streamsize bytes_read = stream.readsome(buf, sizeof(buf));
+ if (bytes_read == 0)
+ break;
+ base::MD5Update(&ctx, base::StringPiece(buf, bytes_read));
+ }
+ if (stream.fail()) {
+ LOG(ERROR) << "Error reading file " << path;
+ return false;
+ }
+ base::MD5Digest digest;
+ base::MD5Final(&digest, &ctx);
+ *digest_string = base::MD5DigestToBase16(digest);
+ return true;
+}
+
+// Returns the set of all files contained in |files|. This handles directories
+// by walking them recursively. Excludes .svn directories and files under them.
+std::set<std::string> MakeFileSet(const char** files) {
+ const std::string svn_dir_component = FILE_PATH_LITERAL("/.svn/");
+ std::set<std::string> file_set;
+ for (const char** file = files; *file; ++file) {
+ base::FilePath file_path(*file);
+ if (base::DirectoryExists(file_path)) {
+ base::FileEnumerator file_enumerator(
+ file_path, true /* recurse */, base::FileEnumerator::FILES);
+ for (base::FilePath child, empty;
+ (child = file_enumerator.Next()) != empty; ) {
+ // If the path contains /.svn/, ignore it.
+ if (child.value().find(svn_dir_component) == std::string::npos) {
+ child = base::MakeAbsoluteFilePath(child);
+ file_set.insert(child.value());
+ }
+ }
+ } else {
+ file_set.insert(*file);
+ }
+ }
+ return file_set;
+}
+
+} // namespace
+
+int main(int argc, const char* argv[]) {
+ if (argc < 2) {
+ LOG(ERROR) << "Usage: md5sum <path/to/file_or_dir>...";
+ return 1;
+ }
+ const std::set<std::string> files = MakeFileSet(argv + 1);
+ bool failed = false;
+ std::string digest;
+ for (std::set<std::string>::const_iterator it = files.begin();
+ it != files.end(); ++it) {
+ if (!MD5Sum(it->c_str(), &digest))
+ failed = true;
+ base::FilePath file_path(*it);
+ std::cout << digest << " "
+ << base::MakeAbsoluteFilePath(file_path).value() << std::endl;
+ }
+ return failed;
+}
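
As a rough functional reference (not the code the build ships), the behaviour of md5sum.cc above — recurse into directories, skip anything under .svn, and print one "<digest> <absolute path>" line per file in path-sorted order — can be sketched in a few lines of Python:

# Rough Python equivalent of md5sum.cc's output (illustrative only).
import hashlib
import os
import sys

def md5_of(path):
    h = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1024), b''):
            h.update(chunk)
    return h.hexdigest()

def file_set(args):
    files = set()
    for arg in args:
        if os.path.isdir(arg):
            for root, _, names in os.walk(arg):
                if '/.svn/' in root + '/':
                    continue  # Skip .svn directories and everything below them.
                files.update(os.path.abspath(os.path.join(root, n)) for n in names)
        else:
            files.add(arg)
    return files

for path in sorted(file_set(sys.argv[1:])):
    print('%s %s' % (md5_of(path), os.path.abspath(path)))
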
diff --git a/tools/android/md5sum/md5sum.gyp b/tools/android/md5sum/md5sum.gyp
new file mode 100644
index 0000000..75d664e
--- /dev/null
+++ b/tools/android/md5sum/md5sum.gyp
@@ -0,0 +1,81 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ # GN: //tools/android/md5sum:md5sum
+ 'target_name': 'md5sum',
+ 'type': 'none',
+ 'dependencies': [
+ 'md5sum_stripped_device_bin',
+ 'md5sum_bin_host#host',
+ ],
+ # For the component build, ensure dependent shared libraries are stripped
+ # and put alongside md5sum to simplify pushing to the device.
+ 'variables': {
+ 'output_dir': '<(PRODUCT_DIR)/md5sum_dist/',
+ 'native_binary': '<(PRODUCT_DIR)/md5sum_bin',
+ },
+ 'includes': ['../../../build/android/native_app_dependencies.gypi'],
+ },
+ {
+ # GN: //tools/android/md5sum:md5sum_bin($default_toolchain)
+ 'target_name': 'md5sum_device_bin',
+ 'type': 'executable',
+ 'dependencies': [
+ '../../../base/base.gyp:base',
+ ],
+ 'include_dirs': [
+ '../../..',
+ ],
+ 'sources': [
+ 'md5sum.cc',
+ ],
+ 'conditions': [
+ [ 'order_profiling!=0 and OS=="android"', {
+ 'dependencies': [ '../../../tools/cygprofile/cygprofile.gyp:cygprofile', ],
+ }],
+ ],
+ },
+ {
+ # GN: //tools/android/md5sum:md5sum_prepare_dist
+ 'target_name': 'md5sum_stripped_device_bin',
+ 'type': 'none',
+ 'dependencies': [
+ 'md5sum_device_bin',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'strip_md5sum_device_bin',
+ 'inputs': ['<(PRODUCT_DIR)/md5sum_device_bin'],
+ 'outputs': ['<(PRODUCT_DIR)/md5sum_bin'],
+ 'action': [
+ '<(android_strip)',
+ '--strip-unneeded',
+ '<@(_inputs)',
+ '-o',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ # Same binary but for the host rather than the device.
+ {
+ # GN: //tools/android/md5sum:md5sum_copy_host($default_toolchain)
+ 'target_name': 'md5sum_bin_host',
+ 'toolsets': ['host'],
+ 'type': 'executable',
+ 'dependencies': [
+ '../../../base/base.gyp:base',
+ ],
+ 'include_dirs': [
+ '../../..',
+ ],
+ 'sources': [
+ 'md5sum.cc',
+ ],
+ },
+ ],
+}
diff --git a/tools/android/memconsumer/java/AndroidManifest.xml b/tools/android/memconsumer/java/AndroidManifest.xml
new file mode 100644
index 0000000..c7f12e4
--- /dev/null
+++ b/tools/android/memconsumer/java/AndroidManifest.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!-- Copyright 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.chromium.memconsumer" android:versionCode="1"
+ android:versionName="1.0">
+
+ <application
+ android:label="MemConsumer">
+ <activity android:name=".MemConsumer" android:icon="@drawable/icon" android:launchMode="singleTop">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN"/>
+ <category android:name="android.intent.category.LAUNCHER"/>
+ </intent-filter>
+ </activity>
+ <service android:name="ResidentService" android:enabled="true" />
+ </application>
+
+</manifest>
diff --git a/tools/android/memconsumer/java/res/drawable/icon.png b/tools/android/memconsumer/java/res/drawable/icon.png
new file mode 100644
index 0000000..cb10c9b
--- /dev/null
+++ b/tools/android/memconsumer/java/res/drawable/icon.png
Binary files differ
diff --git a/tools/android/memconsumer/java/res/drawable/notification_icon.png b/tools/android/memconsumer/java/res/drawable/notification_icon.png
new file mode 100644
index 0000000..7fc92c3
--- /dev/null
+++ b/tools/android/memconsumer/java/res/drawable/notification_icon.png
Binary files differ
diff --git a/tools/android/memconsumer/java/src/org/chromium/memconsumer/MemConsumer.java b/tools/android/memconsumer/java/src/org/chromium/memconsumer/MemConsumer.java
new file mode 100644
index 0000000..17566f8
--- /dev/null
+++ b/tools/android/memconsumer/java/src/org/chromium/memconsumer/MemConsumer.java
@@ -0,0 +1,107 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.memconsumer;
+
+import android.app.Activity;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.view.Gravity;
+import android.view.KeyEvent;
+import android.view.View;
+import android.widget.EditText;
+import android.widget.NumberPicker;
+import android.widget.TextView;
+
+public class MemConsumer extends Activity {
+ public static final String NOTIFICATION_ACTION =
+ MemConsumer.class.toString() + ".NOTIFICATION";
+
+ private ResidentService mResidentService;
+ private int mMemory = 0;
+ private NumberPicker mMemoryPicker;
+
+ private ServiceConnection mServiceConnection = new ServiceConnection() {
+ @Override
+ public void onServiceConnected(ComponentName name, IBinder binder) {
+ mResidentService = ((ResidentService.ServiceBinder) binder).getService();
+ mResidentService.useMemory(mMemory);
+ }
+
+ @Override
+ public void onServiceDisconnected(ComponentName name) {
+ mResidentService = null;
+ }
+ };
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ mMemoryPicker = new NumberPicker(this);
+ mMemoryPicker.setGravity(Gravity.CENTER);
+ mMemoryPicker.setMaxValue(Integer.MAX_VALUE);
+ mMemoryPicker.setMinValue(0);
+ mMemoryPicker.setOnValueChangedListener(new NumberPicker.OnValueChangeListener() {
+ @Override
+ public void onValueChange(NumberPicker picker, int oldVal, int newVal) {
+ updateMemoryConsumption(picker.getValue());
+ }
+ });
+ for (int i = 0; i < mMemoryPicker.getChildCount(); i++) {
+ View child = mMemoryPicker.getChildAt(i);
+ if (child instanceof EditText) {
+ EditText editText = (EditText) child;
+ editText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
+ @Override
+ public boolean onEditorAction (TextView v, int actionId, KeyEvent event) {
+ if (v.getText().length() > 0) {
+ updateMemoryConsumption(Integer.parseInt(v.getText().toString()));
+ }
+ return false;
+ }
+ });
+ }
+ }
+ setContentView(mMemoryPicker);
+ onNewIntent(getIntent());
+ }
+
+ @Override
+ protected void onNewIntent(Intent intent) {
+ super.onNewIntent(intent);
+ if (NOTIFICATION_ACTION.equals(intent.getAction())) {
+ updateMemoryConsumption(0);
+ return;
+ }
+ if (!intent.hasExtra("memory")) return;
+ updateMemoryConsumption(intent.getIntExtra("memory", 0));
+ }
+
+ void updateMemoryConsumption(int memory) {
+ if (memory == mMemory || memory < 0) return;
+ mMemory = memory;
+ mMemoryPicker.setValue(mMemory);
+ if (mResidentService == null) {
+ if (mMemory > 0) {
+ Intent resident = new Intent();
+ resident.setClass(this, ResidentService.class);
+ startService(resident);
+ bindService(new Intent(this, ResidentService.class),
+ mServiceConnection,
+ Context.BIND_AUTO_CREATE);
+ }
+ } else {
+ mResidentService.useMemory(mMemory);
+ if (mMemory == 0) {
+ unbindService(mServiceConnection);
+ stopService(new Intent(this, ResidentService.class));
+ mResidentService = null;
+ }
+ }
+ }
+}
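
MemConsumer.onNewIntent() reads an integer "memory" extra, so the activity can also be driven from a host machine over adb. A hypothetical helper along these lines (assumes adb is on PATH and the APK is installed; the exact `am start` flags can vary between Android releases):

# Hypothetical helper: ask MemConsumer to hold `mb` megabytes via the integer
# "memory" extra consumed by MemConsumer.onNewIntent(). Illustrative only.
import subprocess

def set_memconsumer_mb(mb, serial=None):
    cmd = ['adb']
    if serial:
        cmd += ['-s', serial]
    cmd += ['shell', 'am', 'start',
            '-n', 'org.chromium.memconsumer/.MemConsumer',
            '--ei', 'memory', str(mb)]
    subprocess.check_call(cmd)

# Example: hold 256 MB, then release it again.
# set_memconsumer_mb(256)
# set_memconsumer_mb(0)
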
diff --git a/tools/android/memconsumer/java/src/org/chromium/memconsumer/ResidentService.java b/tools/android/memconsumer/java/src/org/chromium/memconsumer/ResidentService.java
new file mode 100644
index 0000000..e40fbfe
--- /dev/null
+++ b/tools/android/memconsumer/java/src/org/chromium/memconsumer/ResidentService.java
@@ -0,0 +1,62 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.memconsumer;
+
+import android.app.Notification;
+import android.app.PendingIntent;
+import android.app.Service;
+import android.content.Intent;
+import android.os.Binder;
+import android.os.IBinder;
+
+public class ResidentService extends Service {
+ static {
+ // Loading the native library.
+ System.loadLibrary("memconsumer");
+ }
+
+ public class ServiceBinder extends Binder {
+ ResidentService getService() {
+ return ResidentService.this;
+ }
+ }
+
+ private static final int RESIDENT_NOTIFICATION_ID = 1;
+
+ private final IBinder mBinder = new ServiceBinder();
+ private boolean mIsInForeground = false;
+
+ @Override
+ public IBinder onBind(Intent intent) {
+ return mBinder;
+ }
+
+ public void useMemory(long memory) {
+ if (memory > 0) {
+ Intent notificationIntent = new Intent(this, MemConsumer.class);
+ notificationIntent.setAction(MemConsumer.NOTIFICATION_ACTION);
+ PendingIntent pendingIntent =
+ PendingIntent.getActivity(this, 0, notificationIntent, 0);
+ Notification notification =
+ new Notification.Builder(getApplicationContext()).
+ setContentTitle("MC running (" + memory + " MB)").
+ setSmallIcon(R.drawable.notification_icon).
+ setDeleteIntent(pendingIntent).
+ setContentIntent(pendingIntent).
+ build();
+ startForeground(RESIDENT_NOTIFICATION_ID, notification);
+ mIsInForeground = true;
+ }
+ if (mIsInForeground && memory == 0) {
+ stopForeground(true);
+ mIsInForeground = false;
+ }
+ nativeUseMemory(memory * 1024 * 1024);
+ }
+
+ // Allocates the requested amount of memory in native code; allocating it
+ // from Java would be limited by the framework's per-app heap size.
+ private native void nativeUseMemory(long memory);
+}
diff --git a/tools/android/memconsumer/memconsumer.gyp b/tools/android/memconsumer/memconsumer.gyp
new file mode 100644
index 0000000..f721fc1
--- /dev/null
+++ b/tools/android/memconsumer/memconsumer.gyp
@@ -0,0 +1,39 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'memconsumer',
+ 'type': 'none',
+ 'dependencies': [
+ 'memconsumer_apk',
+ ],
+ },
+ {
+ 'target_name': 'memconsumer_apk',
+ 'type': 'none',
+ 'variables': {
+ 'apk_name': 'MemConsumer',
+ 'java_in_dir': 'java',
+ 'resource_dir': 'java/res',
+ 'native_lib_target': 'libmemconsumer',
+ },
+ 'dependencies': [
+ 'libmemconsumer',
+ ],
+ 'includes': [ '../../../build/java_apk.gypi' ],
+ },
+ {
+ 'target_name': 'libmemconsumer',
+ 'type': 'shared_library',
+ 'sources': [
+ 'memconsumer_hook.cc',
+ ],
+ 'libraries': [
+ '-llog',
+ ],
+ },
+ ],
+}
diff --git a/tools/android/memconsumer/memconsumer_hook.cc b/tools/android/memconsumer/memconsumer_hook.cc
new file mode 100644
index 0000000..9ae0bc1
--- /dev/null
+++ b/tools/android/memconsumer/memconsumer_hook.cc
@@ -0,0 +1,55 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <android/log.h>
+#include <jni.h>
+#include <stdio.h>
+#include <string.h>
+
+extern "C" {
+JNIEXPORT void JNICALL
+ Java_org_chromium_memconsumer_ResidentService_nativeUseMemory(JNIEnv* env,
+ jobject clazz,
+ jlong memory);
+}
+
+namespace {
+
+uint32_t get_random() {
+ static uint32_t m_w = 1;
+ static uint32_t m_z = 1;
+ m_z = 36969 * (m_z & 65535) + (m_z >> 16);
+ m_w = 18000 * (m_w & 65535) + (m_w >> 16);
+ return (m_z << 16) + m_w;
+}
+
+} // namespace
+
+JNIEXPORT void JNICALL
+ Java_org_chromium_memconsumer_ResidentService_nativeUseMemory(
+ JNIEnv* env,
+ jobject clazz,
+ jlong memory) {
+ static uint32_t* g_memory = NULL;
+ if (g_memory)
+ free(g_memory);
+ if (memory == 0) {
+ g_memory = NULL;
+ return;
+ }
+ g_memory = static_cast<uint32_t*>(malloc(memory));
+ if (!g_memory) {
+ __android_log_print(ANDROID_LOG_WARN,
+ "MemConsumer",
+ "Unable to allocate %ld bytes",
+ memory);
+ }
+ // If memory allocation failed, try to allocate as much as possible.
+ while (!g_memory) {
+ memory /= 2;
+ g_memory = static_cast<uint32_t*>(malloc(memory));
+ }
+ for (int i = 0; i < memory / sizeof(uint32_t); ++i)
+ *(g_memory + i) = get_random();
+}
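
The loop at the end of nativeUseMemory() fills every 32-bit word of the buffer with output from a small multiply-with-carry generator, presumably so the pages are actually written (and therefore committed as dirty, private memory) rather than left as untouched zero pages. For reference, a direct Python transcription of get_random() above (illustrative only):

# Python transcription of get_random(): 32-bit multiply-with-carry PRNG.
M32 = 0xffffffff

def mwc():
    m_w, m_z = 1, 1
    while True:
        m_z = (36969 * (m_z & 0xffff) + (m_z >> 16)) & M32
        m_w = (18000 * (m_w & 0xffff) + (m_w >> 16)) & M32
        yield ((m_z << 16) + m_w) & M32

gen = mwc()
print([hex(next(gen)) for _ in range(4)])  # First few values of the sequence.
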
diff --git a/tools/android/memdump/memdump.cc b/tools/android/memdump/memdump.cc
new file mode 100644
index 0000000..cb4b05a
--- /dev/null
+++ b/tools/android/memdump/memdump.cc
@@ -0,0 +1,534 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <fcntl.h>
+#include <signal.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <algorithm>
+#include <cstring>
+#include <fstream>
+#include <iostream>
+#include <limits>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "base/base64.h"
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/containers/hash_tables.h"
+#include "base/files/file_util.h"
+#include "base/files/scoped_file.h"
+#include "base/format_macros.h"
+#include "base/logging.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_split.h"
+#include "base/strings/stringprintf.h"
+
+const unsigned int kPageSize = getpagesize();
+
+namespace {
+
+class BitSet {
+ public:
+ void resize(size_t nbits) {
+ data_.resize((nbits + 7) / 8);
+ }
+
+ void set(uint32 bit) {
+ const uint32 byte_idx = bit / 8;
+ CHECK(byte_idx < data_.size());
+ data_[byte_idx] |= (1 << (bit & 7));
+ }
+
+ std::string AsB64String() const {
+ // Simple optimization: strip trailing zero bytes from the bitmap.
+ // For instance, if a region has 32 pages but only the first 9 are resident,
+ // The full bitmap would be 0xff 0x01 0x00 0x00, the stripped one 0xff 0x01.
+ // It can save up to some seconds when printing large mmaps, in particular
+ // in presence of large virtual address space reservations (where none of
+ // the pages are resident).
+ size_t end = data_.size();
+ while (end > 0 && data_[end - 1] == '\0')
+ --end;
+ std::string bits(&data_[0], end);
+ std::string b64_string;
+ base::Base64Encode(bits, &b64_string);
+ return b64_string;
+ }
+
+ private:
+ std::vector<char> data_;
+};
+
+// An entry in /proc/<pid>/pagemap.
+struct PageMapEntry {
+ uint64 page_frame_number : 55;
+ uint unused : 8;
+ uint present : 1;
+};
+
+// Describes a memory page.
+struct PageInfo {
+ int64 page_frame_number; // Physical page id, also known as PFN.
+ int64 flags;
+ int32 times_mapped;
+};
+
+struct PageCount {
+ PageCount() : total_count(0), unevictable_count(0) {}
+
+ int total_count;
+ int unevictable_count;
+};
+
+struct MemoryMap {
+ std::string name;
+ std::string flags;
+ uint64 start_address;
+ uint64 end_address;
+ uint64 offset;
+ PageCount private_pages;
+ // app_shared_pages[i] contains the number of pages mapped in i+2 processes
+ // (only among the processes that are being analyzed).
+ std::vector<PageCount> app_shared_pages;
+ PageCount other_shared_pages;
+ std::vector<PageInfo> committed_pages;
+ // committed_pages_bits is a bitset reflecting the present bit for all the
+ // virtual pages of the mapping.
+ BitSet committed_pages_bits;
+};
+
+struct ProcessMemory {
+ pid_t pid;
+ std::vector<MemoryMap> memory_maps;
+};
+
+bool PageIsUnevictable(const PageInfo& page_info) {
+ // These constants are taken from kernel-page-flags.h.
+ const int KPF_DIRTY = 4; // Note that only file-mapped pages can be DIRTY.
+ const int KPF_ANON = 12; // Anonymous pages are dirty by definition.
+ const int KPF_UNEVICTABLE = 18;
+ const int KPF_MLOCKED = 33;
+
+ return (page_info.flags & ((1ll << KPF_DIRTY) |
+ (1ll << KPF_ANON) |
+ (1ll << KPF_UNEVICTABLE) |
+ (1ll << KPF_MLOCKED))) ?
+ true : false;
+}
+
+// Number of times a physical page is mapped in a process.
+typedef base::hash_map<uint64, int> PFNMap;
+
+// Parses lines from /proc/<PID>/maps, e.g.:
+// 401e7000-401f5000 r-xp 00000000 103:02 158 /system/bin/linker
+bool ParseMemoryMapLine(const std::string& line,
+ std::vector<std::string>* tokens,
+ MemoryMap* memory_map) {
+ tokens->clear();
+ base::SplitString(line, ' ', tokens);
+ if (tokens->size() < 2)
+ return false;
+ const std::string& addr_range = tokens->at(0);
+ std::vector<std::string> range_tokens;
+ base::SplitString(addr_range, '-', &range_tokens);
+ const std::string& start_address_token = range_tokens.at(0);
+ if (!base::HexStringToUInt64(start_address_token,
+ &memory_map->start_address)) {
+ return false;
+ }
+ const std::string& end_address_token = range_tokens.at(1);
+ if (!base::HexStringToUInt64(end_address_token, &memory_map->end_address)) {
+ return false;
+ }
+ if (tokens->at(1).size() != strlen("rwxp"))
+ return false;
+ memory_map->flags.swap(tokens->at(1));
+ if (!base::HexStringToUInt64(tokens->at(2), &memory_map->offset))
+ return false;
+ memory_map->committed_pages_bits.resize(
+ (memory_map->end_address - memory_map->start_address) / kPageSize);
+ const int map_name_index = 5;
+ if (tokens->size() >= map_name_index + 1) {
+ for (std::vector<std::string>::const_iterator it =
+ tokens->begin() + map_name_index; it != tokens->end(); ++it) {
+ if (!it->empty()) {
+ if (!memory_map->name.empty())
+ memory_map->name.append(" ");
+ memory_map->name.append(*it);
+ }
+ }
+ }
+ return true;
+}
+
+// Reads the |offset|-th record of sizeof(T) bytes from file |fd| into |value|.
+template <typename T>
+bool ReadFromFileAtOffset(int fd, off_t offset, T* value) {
+ if (lseek64(fd, offset * sizeof(*value), SEEK_SET) < 0) {
+ PLOG(ERROR) << "lseek";
+ return false;
+ }
+ ssize_t bytes = read(fd, value, sizeof(*value));
+ if (bytes != sizeof(*value) && bytes != 0) {
+ PLOG(ERROR) << "read";
+ return false;
+ }
+ return true;
+}
+
+// Fills |process_maps| in with the process memory maps identified by |pid|.
+bool GetProcessMaps(pid_t pid, std::vector<MemoryMap>* process_maps) {
+ std::ifstream maps_file(base::StringPrintf("/proc/%d/maps", pid).c_str());
+ if (!maps_file.good()) {
+ PLOG(ERROR) << "open";
+ return false;
+ }
+ std::string line;
+ std::vector<std::string> tokens;
+ while (std::getline(maps_file, line) && !line.empty()) {
+ MemoryMap memory_map = {};
+ if (!ParseMemoryMapLine(line, &tokens, &memory_map)) {
+ LOG(ERROR) << "Could not parse line: " << line;
+ return false;
+ }
+ process_maps->push_back(memory_map);
+ }
+ return true;
+}
+
+// Fills |committed_pages| in with the set of committed pages contained in the
+// provided memory map.
+bool GetPagesForMemoryMap(int pagemap_fd,
+ const MemoryMap& memory_map,
+ std::vector<PageInfo>* committed_pages,
+ BitSet* committed_pages_bits) {
+ const off64_t offset = memory_map.start_address / kPageSize;
+ if (lseek64(pagemap_fd, offset * sizeof(PageMapEntry), SEEK_SET) < 0) {
+ PLOG(ERROR) << "lseek";
+ return false;
+ }
+ for (uint64 addr = memory_map.start_address, page_index = 0;
+ addr < memory_map.end_address;
+ addr += kPageSize, ++page_index) {
+ DCHECK_EQ(0, addr % kPageSize);
+ PageMapEntry page_map_entry = {};
+ COMPILE_ASSERT(sizeof(PageMapEntry) == sizeof(uint64), unexpected_size);
+ ssize_t bytes = read(pagemap_fd, &page_map_entry, sizeof(page_map_entry));
+ if (bytes != sizeof(PageMapEntry) && bytes != 0) {
+ PLOG(ERROR) << "read";
+ return false;
+ }
+ if (page_map_entry.present) { // Ignore non-committed pages.
+ if (page_map_entry.page_frame_number == 0)
+ continue;
+ PageInfo page_info = {};
+ page_info.page_frame_number = page_map_entry.page_frame_number;
+ committed_pages->push_back(page_info);
+ committed_pages_bits->set(page_index);
+ }
+ }
+ return true;
+}
+
+// Fills |pages| with mapping count and flags information gathered by
+// looking up /proc/kpagecount and /proc/kpageflags.
+bool SetPagesInfo(int pagecount_fd,
+ int pageflags_fd,
+ std::vector<PageInfo>* pages) {
+ for (std::vector<PageInfo>::iterator it = pages->begin();
+ it != pages->end(); ++it) {
+ PageInfo* const page_info = &*it;
+ int64 times_mapped;
+ if (!ReadFromFileAtOffset(
+ pagecount_fd, page_info->page_frame_number, ×_mapped)) {
+ return false;
+ }
+ DCHECK(times_mapped <= std::numeric_limits<int32_t>::max());
+ page_info->times_mapped = static_cast<int32>(times_mapped);
+
+ int64 page_flags;
+ if (!ReadFromFileAtOffset(
+ pageflags_fd, page_info->page_frame_number, &page_flags)) {
+ return false;
+ }
+ page_info->flags = page_flags;
+ }
+ return true;
+}
+
+// Fills in the provided vector of Page Frame Number maps. This lets
+// ClassifyPages() know how many times each page is mapped in the processes.
+void FillPFNMaps(const std::vector<ProcessMemory>& processes_memory,
+ std::vector<PFNMap>* pfn_maps) {
+ int current_process_index = 0;
+ for (std::vector<ProcessMemory>::const_iterator it = processes_memory.begin();
+ it != processes_memory.end(); ++it, ++current_process_index) {
+ const std::vector<MemoryMap>& memory_maps = it->memory_maps;
+ for (std::vector<MemoryMap>::const_iterator it = memory_maps.begin();
+ it != memory_maps.end(); ++it) {
+ const std::vector<PageInfo>& pages = it->committed_pages;
+ for (std::vector<PageInfo>::const_iterator it = pages.begin();
+ it != pages.end(); ++it) {
+ const PageInfo& page_info = *it;
+ PFNMap* const pfn_map = &(*pfn_maps)[current_process_index];
+ const std::pair<PFNMap::iterator, bool> result = pfn_map->insert(
+ std::make_pair(page_info.page_frame_number, 0));
+ ++result.first->second;
+ }
+ }
+ }
+}
+
+// Sets the private_pages/app_shared_pages/other_shared_pages fields of the
+// provided memory maps for each process.
+void ClassifyPages(std::vector<ProcessMemory>* processes_memory) {
+ std::vector<PFNMap> pfn_maps(processes_memory->size());
+ FillPFNMaps(*processes_memory, &pfn_maps);
+ // Hash set keeping track of the physical pages mapped in a single process so
+ // that they can be counted only once.
+ base::hash_set<uint64> physical_pages_mapped_in_process;
+
+ for (std::vector<ProcessMemory>::iterator it = processes_memory->begin();
+ it != processes_memory->end(); ++it) {
+ std::vector<MemoryMap>* const memory_maps = &it->memory_maps;
+ physical_pages_mapped_in_process.clear();
+ for (std::vector<MemoryMap>::iterator it = memory_maps->begin();
+ it != memory_maps->end(); ++it) {
+ MemoryMap* const memory_map = &*it;
+ const size_t processes_count = processes_memory->size();
+ memory_map->app_shared_pages.resize(processes_count - 1);
+ const std::vector<PageInfo>& pages = memory_map->committed_pages;
+ for (std::vector<PageInfo>::const_iterator it = pages.begin();
+ it != pages.end(); ++it) {
+ const PageInfo& page_info = *it;
+ if (page_info.times_mapped == 1) {
+ ++memory_map->private_pages.total_count;
+ if (PageIsUnevictable(page_info))
+ ++memory_map->private_pages.unevictable_count;
+ continue;
+ }
+ const uint64 page_frame_number = page_info.page_frame_number;
+ const std::pair<base::hash_set<uint64>::iterator, bool> result =
+ physical_pages_mapped_in_process.insert(page_frame_number);
+ const bool did_insert = result.second;
+ if (!did_insert) {
+ // This physical page (mapped multiple times in the same process) was
+ // already counted.
+ continue;
+ }
+ // See if the current physical page is also mapped in the other
+ // processes that are being analyzed.
+ int times_mapped = 0;
+ int mapped_in_processes_count = 0;
+ for (std::vector<PFNMap>::const_iterator it = pfn_maps.begin();
+ it != pfn_maps.end(); ++it) {
+ const PFNMap& pfn_map = *it;
+ const PFNMap::const_iterator found_it = pfn_map.find(
+ page_frame_number);
+ if (found_it == pfn_map.end())
+ continue;
+ ++mapped_in_processes_count;
+ times_mapped += found_it->second;
+ }
+ PageCount* page_count_to_update = NULL;
+ if (times_mapped == page_info.times_mapped) {
+ // The physical page is only mapped in the processes that are being
+ // analyzed.
+ if (mapped_in_processes_count > 1) {
+ // The physical page is mapped in multiple processes.
+ page_count_to_update =
+ &memory_map->app_shared_pages[mapped_in_processes_count - 2];
+ } else {
+ // The physical page is mapped multiple times in the same process.
+ page_count_to_update = &memory_map->private_pages;
+ }
+ } else {
+ page_count_to_update = &memory_map->other_shared_pages;
+ }
+ ++page_count_to_update->total_count;
+ if (PageIsUnevictable(page_info))
+ ++page_count_to_update->unevictable_count;
+ }
+ }
+ }
+}
+
+void AppendAppSharedField(const std::vector<PageCount>& app_shared_pages,
+ std::string* out) {
+ out->append("[");
+ for (std::vector<PageCount>::const_iterator it = app_shared_pages.begin();
+ it != app_shared_pages.end(); ++it) {
+ out->append(base::IntToString(it->total_count * kPageSize));
+ out->append(":");
+ out->append(base::IntToString(it->unevictable_count * kPageSize));
+ if (it + 1 != app_shared_pages.end())
+ out->append(",");
+ }
+ out->append("]");
+}
+
+void DumpProcessesMemoryMapsInShortFormat(
+ const std::vector<ProcessMemory>& processes_memory) {
+ const int KB_PER_PAGE = kPageSize >> 10;
+ std::vector<int> totals_app_shared(processes_memory.size());
+ std::string buf;
+ std::cout << "pid\tprivate\t\tshared_app\tshared_other (KB)\n";
+ for (std::vector<ProcessMemory>::const_iterator it = processes_memory.begin();
+ it != processes_memory.end(); ++it) {
+ const ProcessMemory& process_memory = *it;
+ std::fill(totals_app_shared.begin(), totals_app_shared.end(), 0);
+ int total_private = 0, total_other_shared = 0;
+ const std::vector<MemoryMap>& memory_maps = process_memory.memory_maps;
+ for (std::vector<MemoryMap>::const_iterator it = memory_maps.begin();
+ it != memory_maps.end(); ++it) {
+ const MemoryMap& memory_map = *it;
+ total_private += memory_map.private_pages.total_count;
+ for (size_t i = 0; i < memory_map.app_shared_pages.size(); ++i)
+ totals_app_shared[i] += memory_map.app_shared_pages[i].total_count;
+ total_other_shared += memory_map.other_shared_pages.total_count;
+ }
+ double total_app_shared = 0;
+ for (size_t i = 0; i < totals_app_shared.size(); ++i)
+ total_app_shared += static_cast<double>(totals_app_shared[i]) / (i + 2);
+ base::SStringPrintf(
+ &buf, "%d\t%d\t\t%d\t\t%d\n",
+ process_memory.pid,
+ total_private * KB_PER_PAGE,
+ static_cast<int>(total_app_shared) * KB_PER_PAGE,
+ total_other_shared * KB_PER_PAGE);
+ std::cout << buf;
+ }
+}
+
+void DumpProcessesMemoryMapsInExtendedFormat(
+ const std::vector<ProcessMemory>& processes_memory) {
+ std::string buf;
+ std::string app_shared_buf;
+ for (std::vector<ProcessMemory>::const_iterator it = processes_memory.begin();
+ it != processes_memory.end(); ++it) {
+ const ProcessMemory& process_memory = *it;
+ std::cout << "[ PID=" << process_memory.pid << "]" << '\n';
+ const std::vector<MemoryMap>& memory_maps = process_memory.memory_maps;
+ for (std::vector<MemoryMap>::const_iterator it = memory_maps.begin();
+ it != memory_maps.end(); ++it) {
+ const MemoryMap& memory_map = *it;
+ app_shared_buf.clear();
+ AppendAppSharedField(memory_map.app_shared_pages, &app_shared_buf);
+ base::SStringPrintf(
+ &buf,
+ "%"PRIx64"-%"PRIx64" %s %"PRIx64" private_unevictable=%d private=%d "
+ "shared_app=%s shared_other_unevictable=%d shared_other=%d "
+ "\"%s\" [%s]\n",
+ memory_map.start_address,
+ memory_map.end_address,
+ memory_map.flags.c_str(),
+ memory_map.offset,
+ memory_map.private_pages.unevictable_count * kPageSize,
+ memory_map.private_pages.total_count * kPageSize,
+ app_shared_buf.c_str(),
+ memory_map.other_shared_pages.unevictable_count * kPageSize,
+ memory_map.other_shared_pages.total_count * kPageSize,
+ memory_map.name.c_str(),
+ memory_map.committed_pages_bits.AsB64String().c_str());
+ std::cout << buf;
+ }
+ }
+}
+
+bool CollectProcessMemoryInformation(int page_count_fd,
+ int page_flags_fd,
+ ProcessMemory* process_memory) {
+ const pid_t pid = process_memory->pid;
+ base::ScopedFD pagemap_fd(HANDLE_EINTR(open(
+ base::StringPrintf("/proc/%d/pagemap", pid).c_str(), O_RDONLY)));
+ if (!pagemap_fd.is_valid()) {
+ PLOG(ERROR) << "open";
+ return false;
+ }
+ std::vector<MemoryMap>* const process_maps = &process_memory->memory_maps;
+ if (!GetProcessMaps(pid, process_maps))
+ return false;
+ for (std::vector<MemoryMap>::iterator it = process_maps->begin();
+ it != process_maps->end(); ++it) {
+ std::vector<PageInfo>* const committed_pages = &it->committed_pages;
+ BitSet* const pages_bits = &it->committed_pages_bits;
+ GetPagesForMemoryMap(pagemap_fd.get(), *it, committed_pages, pages_bits);
+ SetPagesInfo(page_count_fd, page_flags_fd, committed_pages);
+ }
+ return true;
+}
+
+void KillAll(const std::vector<pid_t>& pids, int signal_number) {
+ for (std::vector<pid_t>::const_iterator it = pids.begin(); it != pids.end();
+ ++it) {
+ kill(*it, signal_number);
+ }
+}
+
+void ExitWithUsage() {
+ LOG(ERROR) << "Usage: memdump [-a] <PID1>... <PIDN>";
+ exit(EXIT_FAILURE);
+}
+
+} // namespace
+
+int main(int argc, char** argv) {
+ if (argc == 1)
+ ExitWithUsage();
+ const bool short_output = !strncmp(argv[1], "-a", 2);
+ if (short_output) {
+ if (argc == 2)
+ ExitWithUsage();
+ ++argv;
+ }
+ std::vector<pid_t> pids;
+ for (const char* const* ptr = argv + 1; *ptr; ++ptr) {
+ pid_t pid;
+ if (!base::StringToInt(*ptr, &pid))
+ return EXIT_FAILURE;
+ pids.push_back(pid);
+ }
+
+ std::vector<ProcessMemory> processes_memory(pids.size());
+ {
+ base::ScopedFD page_count_fd(
+ HANDLE_EINTR(open("/proc/kpagecount", O_RDONLY)));
+ if (!page_count_fd.is_valid()) {
+ PLOG(ERROR) << "open /proc/kpagecount";
+ return EXIT_FAILURE;
+ }
+
+ base::ScopedFD page_flags_fd(open("/proc/kpageflags", O_RDONLY));
+ if (!page_flags_fd.is_valid()) {
+ PLOG(ERROR) << "open /proc/kpageflags";
+ return EXIT_FAILURE;
+ }
+
+ base::ScopedClosureRunner auto_resume_processes(
+ base::Bind(&KillAll, pids, SIGCONT));
+ KillAll(pids, SIGSTOP);
+ for (std::vector<pid_t>::const_iterator it = pids.begin(); it != pids.end();
+ ++it) {
+ ProcessMemory* const process_memory =
+ &processes_memory[it - pids.begin()];
+ process_memory->pid = *it;
+ if (!CollectProcessMemoryInformation(
+ page_count_fd.get(), page_flags_fd.get(), process_memory)) {
+ return EXIT_FAILURE;
+ }
+ }
+ }
+
+ ClassifyPages(&processes_memory);
+ if (short_output)
+ DumpProcessesMemoryMapsInShortFormat(processes_memory);
+ else
+ DumpProcessesMemoryMapsInExtendedFormat(processes_memory);
+ return EXIT_SUCCESS;
+}
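
Each line of memdump's extended output ends with a base64-encoded bitmap of the mapping's resident pages (see BitSet::AsB64String() above, which strips trailing zero bytes). A small sketch of decoding that field; memsymbols.py further down does the full line parsing:

# Decode the trailing [base64] bitmap from one line of memdump's extended
# output and count the resident pages it marks (illustrative sketch).
import base64

def resident_page_count(bitmap_b64):
    bits = base64.b64decode(bitmap_b64)
    count = 0
    for byte in bytearray(bits):
        for bit in range(8):
            if (byte >> bit) & 1:
                count += 1
    return count

# The "0xff 0x01" example from BitSet::AsB64String()'s comment: 9 resident pages.
print(resident_page_count(base64.b64encode(b'\xff\x01').decode()))  # -> 9
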
diff --git a/tools/android/memdump/memdump.gyp b/tools/android/memdump/memdump.gyp
new file mode 100644
index 0000000..f47cedf
--- /dev/null
+++ b/tools/android/memdump/memdump.gyp
@@ -0,0 +1,39 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'memdump-unstripped',
+ 'type': 'executable',
+ 'dependencies': [
+ '../../../base/base.gyp:base',
+ ],
+ 'sources': [
+ 'memdump.cc',
+ ],
+ },
+ {
+ 'target_name': 'memdump',
+ 'type': 'none',
+ 'dependencies': [
+ 'memdump-unstripped',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'strip_memdump',
+ 'inputs': ['<(PRODUCT_DIR)/memdump-unstripped'],
+ 'outputs': ['<(PRODUCT_DIR)/memdump'],
+ 'action': [
+ '<(android_strip)',
+ '--strip-unneeded',
+ '<@(_inputs)',
+ '-o',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/tools/android/memdump/memsymbols.py b/tools/android/memdump/memsymbols.py
new file mode 100755
index 0000000..3721963
--- /dev/null
+++ b/tools/android/memdump/memsymbols.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import os
+import sys
+import re
+
+from optparse import OptionParser
+
+"""Extracts the list of resident symbols of a library loaded in a process.
+
+This script combines the extended output of memdump for a given process
+(obtained through memdump -x PID) and the symbol table of a .so loaded in that
+process (obtained through nm -C lib-with-symbols.so), keeping only those
+symbols that were resident in memory at the time of the snapshot (that is,
+the symbols whose start address falls within a mapped page of the .so that
+was resident when the snapshot was taken).
+The aim is to perform a "code coverage"-like profiling of a binary, intersecting
+run-time information (list of resident pages) and debug symbols.
+"""
+
+_PAGE_SIZE = 4096
+
+
+def _TestBit(word, bit):
+ assert(bit >= 0 and bit < 8)
+ return not not ((word >> bit) & 1)
+
+
+def _HexAddr(addr):
+ return hex(addr)[2:].zfill(8)
+
+
+def _GetResidentPagesSet(memdump_contents, lib_name, verbose):
+ """Parses the memdump output and extracts the resident page set for lib_name.
+ Args:
+ memdump_contents: Array of strings (lines) of a memdump output.
+ lib_name: A string containing the name of the library.so to be matched.
+ verbose: Print a verbose header for each mapping matched.
+
+ Returns:
+ A set of resident page indices for all the
+ mappings matching .*lib_name.
+ """
+ resident_pages = set()
+ MAP_RX = re.compile(
+ r'^([0-9a-f]+)-([0-9a-f]+) ([\w-]+) ([0-9a-f]+) .* "(.*)" \[(.*)\]$')
+ for line in memdump_contents:
+ line = line.rstrip('\r\n')
+ if line.startswith('[ PID'):
+ continue
+
+ r = MAP_RX.match(line)
+ if not r:
+ sys.stderr.write('Skipping unparseable memdump line: %s\n' % line)
+ continue
+
+ map_start = int(r.group(1), 16)
+ map_end = int(r.group(2), 16)
+ prot = r.group(3)
+ offset = int(r.group(4), 16)
+ assert(offset % _PAGE_SIZE == 0)
+ lib = r.group(5)
+ enc_bitmap = r.group(6)
+
+ if not lib.endswith(lib_name):
+ continue
+
+ bitmap = base64.b64decode(enc_bitmap)
+ map_pages_count = (map_end - map_start + 1) / _PAGE_SIZE
+ bitmap_pages_count = len(bitmap) * 8
+
+ if verbose:
+ print 'Found %s: mapped %d pages in mode %s @ offset %s.' % (
+ lib, map_pages_count, prot, _HexAddr(offset))
+ print ' Map range in the process VA: [%s - %s]. Len: %s' % (
+ _HexAddr(map_start),
+ _HexAddr(map_end),
+ _HexAddr(map_pages_count * _PAGE_SIZE))
+ print ' Corresponding addresses in the binary: [%s - %s]. Len: %s' % (
+ _HexAddr(offset),
+ _HexAddr(offset + map_end - map_start),
+ _HexAddr(map_pages_count * _PAGE_SIZE))
+ print ' Bitmap: %d pages' % bitmap_pages_count
+ print ''
+
+ assert(bitmap_pages_count >= map_pages_count)
+ for i in xrange(map_pages_count):
+ bitmap_idx = i / 8
+ bitmap_off = i % 8
+ if (bitmap_idx < len(bitmap) and
+ _TestBit(ord(bitmap[bitmap_idx]), bitmap_off)):
+ resident_pages.add(offset / _PAGE_SIZE + i)
+ return resident_pages
+
+
+def main(argv):
+ NM_RX = re.compile(r'^([0-9a-f]+)\s+.*$')
+
+ parser = OptionParser()
+ parser.add_option("-r", "--reverse",
+ action="store_true", dest="reverse", default=False,
+ help="Print out non present symbols")
+ parser.add_option("-v", "--verbose",
+ action="store_true", dest="verbose", default=False,
+ help="Print out verbose debug information.")
+
+ (options, args) = parser.parse_args()
+
+ if len(args) != 3:
+ print 'Usage: %s [-v] memdump.file nm.file library.so' % (
+ os.path.basename(argv[0]))
+ return 1
+
+ memdump_file = args[0]
+ nm_file = args[1]
+ lib_name = args[2]
+
+ if memdump_file == '-':
+ memdump_contents = sys.stdin.readlines()
+ else:
+ memdump_contents = open(memdump_file, 'r').readlines()
+ resident_pages = _GetResidentPagesSet(memdump_contents,
+ lib_name,
+ options.verbose)
+
+ # Process the nm symbol table, filtering out the resident symbols.
+ nm_fh = open(nm_file, 'r')
+ for line in nm_fh:
+ line = line.rstrip('\r\n')
+ # Skip undefined symbols (lines with no address).
+ if line.startswith(' '):
+ continue
+
+ r = NM_RX.match(line)
+ if not r:
+ sys.stderr.write('Skipping %s from %s\n' % (line, nm_file))
+ continue
+
+ sym_addr = int(r.group(1), 16)
+ sym_page = sym_addr / _PAGE_SIZE
+ last_sym_matched = (sym_page in resident_pages)
+ if (sym_page in resident_pages) != options.reverse:
+ print line
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/tools/android/mempressure.py b/tools/android/mempressure.py
new file mode 100755
index 0000000..fa3daba
--- /dev/null
+++ b/tools/android/mempressure.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import logging
+import optparse
+import os
+import sys
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__),
+ os.pardir,
+ os.pardir,
+ 'build',
+ 'android')
+sys.path.append(BUILD_ANDROID_DIR)
+from pylib import android_commands
+from pylib import constants
+from pylib import flag_changer
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.device import intent
+
+# Browser Constants
+DEFAULT_BROWSER = 'chrome'
+
+# Action Constants
+ACTION_PACKAGE = 'org.chromium.base'
+ACTION_TRIM = {
+ 'moderate' : ACTION_PACKAGE + '.ACTION_TRIM_MEMORY_MODERATE',
+ 'critical' : ACTION_PACKAGE + '.ACTION_TRIM_MEMORY_RUNNING_CRITICAL',
+ 'complete' : ACTION_PACKAGE + '.ACTION_TRIM_MEMORY'
+}
+ACTION_LOW = ACTION_PACKAGE + '.ACTION_LOW_MEMORY'
+
+# Command Line Constants
+ENABLE_TEST_INTENTS_FLAG = '--enable-test-intents'
+
+def main(argv):
+ option_parser = optparse.OptionParser()
+ option_parser.add_option('-l',
+ '--low',
+ help='Simulate Activity#onLowMemory()',
+ action='store_true')
+ option_parser.add_option('-t',
+ '--trim',
+ help=('Simulate Activity#onTrimMemory(...) with ' +
+ ', '.join(ACTION_TRIM.keys())),
+ type='string')
+ option_parser.add_option('-b',
+ '--browser',
+ default=DEFAULT_BROWSER,
+ help=('Which browser to use. One of ' +
+ ', '.join(constants.PACKAGE_INFO.keys()) +
+ ' [default: %default]'),
+ type='string')
+
+ (options, args) = option_parser.parse_args(argv)
+
+ if len(args) > 1:
+ print 'Unknown argument: ', args[1:]
+ option_parser.print_help()
+ sys.exit(1)
+
+ if options.low and options.trim:
+ option_parser.error('options --low and --trim are mutually exclusive')
+
+ if not options.low and not options.trim:
+ option_parser.print_help()
+ sys.exit(1)
+
+ action = None
+ if options.low:
+ action = ACTION_LOW
+ elif options.trim in ACTION_TRIM.keys():
+ action = ACTION_TRIM[options.trim]
+
+ if action is None:
+ option_parser.print_help()
+ sys.exit(1)
+
+ if not options.browser in constants.PACKAGE_INFO.keys():
+ option_parser.error('Unknown browser option ' + options.browser)
+
+ package_info = constants.PACKAGE_INFO[options.browser]
+
+ package = package_info.package
+ activity = package_info.activity
+
+ devices = android_commands.GetAttachedDevices()
+ if not devices:
+ raise device_errors.NoDevicesError()
+ elif len(devices) > 1:
+ logging.warning('Multiple devices attached. Using %s.' % devices[0])
+ device = device_utils.DeviceUtils(devices[0])
+
+ try:
+ device.EnableRoot()
+ except device_errors.CommandFailedError as e:
+ # Try to change the flags and start the activity anyway.
+ # TODO(jbudorick) Handle this exception appropriately after interface
+ # conversions are finished.
+ logging.error(str(e))
+ flags = flag_changer.FlagChanger(device, package_info.cmdline_file)
+ if ENABLE_TEST_INTENTS_FLAG not in flags.Get():
+ flags.AddFlags([ENABLE_TEST_INTENTS_FLAG])
+
+ device.StartActivity(intent.Intent(package=package, activity=activity,
+ action=action))
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/tools/android/ps_ext/ps_ext.c b/tools/android/ps_ext/ps_ext.c
new file mode 100644
index 0000000..06cf7bc
--- /dev/null
+++ b/tools/android/ps_ext/ps_ext.c
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2014 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <ctype.h>
+#include <dirent.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <unistd.h>
+
+/*
+ * This tool is essentially an extended version of ps with JSON output.
+ * Its output is meant to be consumed by scripts / tools that gather OS/ps stats.
+ * Output units:
+ * All times are expressed in ticks.
+ * All memory counters are expressed in kB.
+ */
+
+static void dump_time(void) {
+ float uptime_secs = 0.0F;
+ const long rate = sysconf(_SC_CLK_TCK);
+ FILE *f = fopen("/proc/uptime", "r");
+ if (!f)
+ return;
+ fscanf(f, "%f", &uptime_secs);
+ fclose(f);
+ const long ticks = (long) (rate * uptime_secs);
+ printf(" \"time\": { \"ticks\": %ld, \"rate\": %ld}", ticks, rate);
+}
+
+static void dump_cpu_stats(void) {
+ FILE *f = fopen("/proc/stat", "r");
+ if (!f)
+ return;
+ printf(" \"cpu\":\n [\n");
+
+ bool terminate_prev_line = false;
+ while (!feof(f)) {
+ char line[256];
+ char cpu[8];
+ long unsigned t_usr = 0;
+ long unsigned t_nice = 0;
+ long unsigned t_sys = 0;
+ long unsigned t_idle = 0;
+ fgets(line, sizeof(line), f);
+
+ /* Skip the total 'cpu ' line and the other irrelevant ones. */
+ if (strncmp(line, "cpu", 3) != 0 || line[3] == ' ')
+ continue;
+ if (sscanf(line, "%s %lu %lu %lu %lu",
+ cpu, &t_usr, &t_nice, &t_sys, &t_idle) != 5) {
+ continue;
+ }
+
+ if (terminate_prev_line)
+ printf(",\n");
+ terminate_prev_line = true;
+ printf(" {\"usr\": %lu, \"sys\": %lu, \"idle\": %lu}",
+ t_usr + t_nice, t_sys, t_idle);
+ }
+ fclose(f);
+ printf("\n ]");
+}
+
+static void dump_mem_stats(void) {
+ FILE *f = fopen("/proc/meminfo", "r");
+ if (!f)
+ return;
+ printf(" \"mem\":\n {\n");
+
+ bool terminate_prev_line = false;
+ while (!feof(f)) {
+ char line[256];
+ char key[32];
+ long unsigned value = 0;
+
+ if (!fgets(line, sizeof(line), f))
+ continue;
+ if (sscanf(line, "%s %lu %*s", key, &value) < 2)
+ continue;
+
+ if (terminate_prev_line)
+ printf(",\n");
+ terminate_prev_line = true;
+ printf(" \"%s\": %lu", key, value);
+ }
+ fclose(f);
+ printf("\n }");
+}
+
+static void dump_proc_stats(void) {
+ struct dirent *de;
+ DIR *d = opendir("/proc");
+ if (!d)
+ return;
+
+ const long kb_per_page = sysconf(_SC_PAGESIZE) / 1024;
+ bool terminate_prev_line = false;
+ printf(" \"processes\":\n {\n");
+ while ((de = readdir(d))) {
+ if (!isdigit(de->d_name[0]))
+ continue;
+ const int pid = atoi(de->d_name);
+
+ /* Don't print out ourselves (how civilized). */
+ if (pid == getpid())
+ continue;
+
+ char cmdline[64];
+ char fpath[32];
+ FILE *f;
+
+ /* Read full process path / package from cmdline. */
+ sprintf(fpath, "/proc/%d/cmdline", pid);
+ f = fopen(fpath, "r");
+ if (!f)
+ continue;
+ cmdline[0] = '\0';
+ fgets(cmdline, sizeof(cmdline), f);
+ fclose(f);
+
+ /* Read cpu/io/mem stats. */
+ char proc_name[256];
+ long num_threads = 0;
+ long unsigned min_faults = 0;
+ long unsigned maj_faults = 0;
+ long unsigned utime = 0;
+ long unsigned ktime = 0;
+ long unsigned vm_rss = 0;
+ long long unsigned start_time = 0;
+
+ sprintf(fpath, "/proc/%d/stat", pid);
+ f = fopen(fpath, "r");
+ if (!f)
+ continue;
+ fscanf(f, "%*d %s %*c %*d %*d %*d %*d %*d %*u %lu %*u %lu %*u %lu %lu "
+ "%*d %*d %*d %*d %ld %*d %llu %*u %ld", proc_name, &min_faults,
+ &maj_faults, &utime, &ktime, &num_threads, &start_time, &vm_rss);
+ fclose(f);
+
+ /* Prefer the cmdline when available, since it contains the package name. */
+ char const * const cmd = (strlen(cmdline) > 0) ? cmdline : proc_name;
+
+ if (terminate_prev_line)
+ printf(",\n");
+ terminate_prev_line = true;
+ printf(" \"%d\": {"
+ "\"name\": \"%s\", "
+ "\"n_threads\": %ld, "
+ "\"start_time\": %llu, "
+ "\"user_time\": %lu, "
+ "\"sys_time\": %lu, "
+ "\"min_faults\": %lu, "
+ "\"maj_faults\": %lu, "
+ "\"vm_rss\": %lu"
+ "}",
+ pid,
+ cmd,
+ num_threads,
+ start_time,
+ utime,
+ ktime,
+ min_faults,
+ maj_faults,
+ vm_rss * kb_per_page);
+ }
+ closedir(d);
+ printf("\n }");
+}
+
+int main()
+{
+ printf("{\n");
+
+ dump_time();
+ printf(",\n");
+
+ dump_mem_stats();
+ printf(",\n");
+
+ dump_cpu_stats();
+ printf(",\n");
+
+ dump_proc_stats();
+ printf("\n}\n");
+
+ return 0;
+}
diff --git a/tools/android/ps_ext/ps_ext.gyp b/tools/android/ps_ext/ps_ext.gyp
new file mode 100644
index 0000000..f467d93
--- /dev/null
+++ b/tools/android/ps_ext/ps_ext.gyp
@@ -0,0 +1,36 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'ps_ext-unstripped',
+ 'type': 'executable',
+ 'sources': [
+ 'ps_ext.c',
+ ],
+ },
+ {
+ 'target_name': 'ps_ext',
+ 'type': 'none',
+ 'dependencies': [
+ 'ps_ext-unstripped',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'strip_ps_ext',
+ 'inputs': ['<(PRODUCT_DIR)/ps_ext-unstripped'],
+ 'outputs': ['<(PRODUCT_DIR)/ps_ext'],
+ 'action': [
+ '<(android_strip)',
+ '--strip-unneeded',
+ '<@(_inputs)',
+ '-o',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/tools/android/purge_ashmem/purge_ashmem.c b/tools/android/purge_ashmem/purge_ashmem.c
new file mode 100644
index 0000000..0de582d
--- /dev/null
+++ b/tools/android/purge_ashmem/purge_ashmem.c
@@ -0,0 +1,20 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include "third_party/ashmem/ashmem.h"
+
+int main(void) {
+ const int pages_purged = ashmem_purge_all();
+ if (pages_purged < 0) {
+ perror("ashmem_purge_all");
+ return EXIT_FAILURE;
+ }
+ printf("Purged %d pages (%d KBytes)\n",
+ pages_purged, pages_purged * getpagesize() / 1024);
+ return EXIT_SUCCESS;
+}
diff --git a/tools/android/purge_ashmem/purge_ashmem.gyp b/tools/android/purge_ashmem/purge_ashmem.gyp
new file mode 100644
index 0000000..d563b1f
--- /dev/null
+++ b/tools/android/purge_ashmem/purge_ashmem.gyp
@@ -0,0 +1,21 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'purge_ashmem',
+ 'type': 'executable',
+ 'dependencies': [
+ '../../../third_party/ashmem/ashmem.gyp:ashmem',
+ ],
+ 'include_dirs': [
+ '../../../',
+ ],
+ 'sources': [
+ 'purge_ashmem.c',
+ ],
+ },
+ ],
+}
diff --git a/tools/android/remove_strings.py b/tools/android/remove_strings.py
new file mode 100755
index 0000000..b8c4807
--- /dev/null
+++ b/tools/android/remove_strings.py
@@ -0,0 +1,49 @@
+#!/usr/bin/python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Remove strings by name from a GRD file."""
+
+import optparse
+import re
+import sys
+
+
+def RemoveStrings(grd_path, string_names):
+ """Removes strings with the given names from a GRD file. Overwrites the file.
+
+ Args:
+ grd_path: path to the GRD file.
+ string_names: a list of string names to be removed.
+ """
+ with open(grd_path, 'r') as f:
+ grd = f.read()
+ names_pattern = '|'.join(map(re.escape, string_names))
+ pattern = r'<message [^>]*name="(%s)".*?</message>\s*' % names_pattern
+ grd = re.sub(pattern, '', grd, flags=re.DOTALL)
+ with open(grd_path, 'w') as f:
+ f.write(grd)
+
+
+def ParseArgs(args):
+ usage = 'usage: %prog GRD_PATH...'
+ parser = optparse.OptionParser(
+ usage=usage, description='Remove strings from GRD files. Reads string '
+ 'names from stdin, and removes strings with those names from the listed '
+ 'GRD files.')
+ options, args = parser.parse_args(args=args)
+ if not args:
+ parser.error('must provide GRD_PATH argument(s)')
+ return args
+
+
+def main(args=None):
+ grd_paths = ParseArgs(args)
+ strings_to_remove = filter(None, map(str.strip, sys.stdin.readlines()))
+ for grd_path in grd_paths:
+ RemoveStrings(grd_path, strings_to_remove)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/android/run_pie/run_pie.c b/tools/android/run_pie/run_pie.c
new file mode 100644
index 0000000..ee1a622
--- /dev/null
+++ b/tools/android/run_pie/run_pie.c
@@ -0,0 +1,68 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <dlfcn.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/prctl.h>
+#include <unistd.h>
+
+// This is a wrapper to run position independent executables on Android ICS,
+// where the linker doesn't support PIE. This requires the PIE binaries to be
+// built with CFLAGS += -fvisibility=default -fPIE, and LDFLAGS += -rdynamic -pie
+// such that the main() symbol remains exported and can be dlsym-ed.
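+//
+// Illustrative invocation (device paths are hypothetical):
+//   adb shell /data/local/tmp/run_pie /data/local/tmp/some_pie_binary --flag
+// The wrapper dlopen()s the target, fixes up argv and the process name, and
+// then calls the target's exported main().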
+
+#define ERR_PREFIX "[PIE Loader] "
+
+typedef int (*main_t)(int, char**);
+
+
+int main(int argc, char** argv) {
+ if (argc < 2) {
+ printf("Usage: %s path_to_pie_executable [args]\n", argv[0]);
+ return -1;
+ }
+
+ // Shift argv[] left by one. argv is what /proc/PID/cmdline reports; cmdline
+ // is what Android "ps" prints out; and "ps" output is what many scripts look
+ // at to decide which processes to kill / killall.
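+ // E.g. (illustrative): argv {"run_pie", "/path/bin", "--foo"} becomes
+ // {"/path/bin", "--foo", NULL}.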
+ int i;
+ char* next_argv_start = argv[0];
+ for (i = 1; i < argc; ++i) {
+ const size_t argv_len = strlen(argv[i]) + 1;
+ memcpy(argv[i - 1], argv[i], argv_len);
+ next_argv_start += argv_len;
+ argv[i] = next_argv_start;
+ }
+ argv[argc - 1] = NULL; // The last argv must be a NULL ptr.
+
+ // Also set the process name accordingly (/proc/PID/comm).
+ prctl(PR_SET_NAME, (long) argv[0]);
+
+ // dlopen should not fail, unless:
+ // - The target binary does not exist;
+ // - The dependent .so libs cannot be loaded.
+ // In both cases, just bail out with an explicit error message.
+ void* handle = dlopen(argv[0], RTLD_NOW);
+ if (handle == NULL) {
+ printf(ERR_PREFIX "dlopen() failed: %s.\n", dlerror());
+ return -1;
+ }
+
+ main_t pie_main = (main_t) dlsym(handle, "main");
+ if (pie_main) {
+ return pie_main(argc - 1, argv);
+ }
+
+ // If we reached this point dlsym failed, very likely because the target
+ // binary has not been compiled with the proper CFLAGS / LDFLAGS.
+ // At this point the most sensible thing to do is to run it normally via
+ // exec and hope that the target binary isn't a PIE.
+ execv(argv[0], argv);
+
+ // execv is not supposed to return unless it fails.
+ printf(ERR_PREFIX "Both dlsym() and the execv() fallback failed.\n");
+ perror("execv");
+ return -1;
+}
diff --git a/tools/android/run_pie/run_pie.gyp b/tools/android/run_pie/run_pie.gyp
new file mode 100644
index 0000000..b713dc4
--- /dev/null
+++ b/tools/android/run_pie/run_pie.gyp
@@ -0,0 +1,49 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'run_pie-unstripped',
+ 'type': 'executable',
+ 'sources': [
+ 'run_pie.c',
+ ],
+ # See crbug.com/373219. This is the only Android executable which must be
+ # non-PIE.
+ 'cflags!': [
+ '-fPIE',
+ ],
+ 'ldflags!': [
+ '-pie',
+ ],
+ # Don't inherit unneeded dependencies on stlport.so, so the binary remains
+ # self-contained even in component=shared_library builds.
+ 'libraries!': [
+ '-l<(android_stlport_library)',
+ ],
+ },
+ {
+ 'target_name': 'run_pie',
+ 'type': 'none',
+ 'dependencies': [
+ 'run_pie-unstripped',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'strip_run_pie',
+ 'inputs': ['<(PRODUCT_DIR)/run_pie-unstripped'],
+ 'outputs': ['<(PRODUCT_DIR)/run_pie'],
+ 'action': [
+ '<(android_strip)',
+ '--strip-unneeded',
+ '<@(_inputs)',
+ '-o',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ ],
+}
diff --git a/tools/clang/CMakeLists.txt b/tools/clang/CMakeLists.txt
new file mode 100644
index 0000000..1b16704
--- /dev/null
+++ b/tools/clang/CMakeLists.txt
@@ -0,0 +1,60 @@
+cmake_minimum_required(VERSION 2.8.8)
+project(ChromeExtras)
+enable_testing()
+
+list(APPEND CMAKE_MODULE_PATH "${LLVM_BUILD_DIR}/share/llvm/cmake")
+
+# These tools are built using LLVM's build system, not Chromium's.
+# The build expects LLVM_SRC_DIR and LLVM_BUILD_DIR to be set.
+# For example:
+#
+# cmake -GNinja \
+# -DLLVM_BUILD_DIR=$CHROMIUM_SRC_DIR/third_party/llvm-build/Release+Asserts \
+# -DLLVM_SRC_DIR=$CHROMIUM_SRC_DIR/third_party/llvm \
+# -DCHROMIUM_TOOLS=blink_gc_plugin;plugin \
+# $CHROMIUM_SRC_DIR/tools/clang/
+# ninja
+
+
+include(LLVMConfig)
+include(AddLLVM)
+include(HandleLLVMOptions)
+
+# Use rpath to find the bundled standard C++ library.
+set(CMAKE_BUILD_WITH_INSTALL_RPATH ON)
+if (APPLE)
+ set(CMAKE_INSTALL_NAME_DIR "@rpath")
+ set(CMAKE_INSTALL_RPATH "@executable_path/../lib")
+else()
+ set(CMAKE_INSTALL_RPATH "\$ORIGIN/../lib")
+endif()
+
+set(LLVM_RUNTIME_OUTPUT_INTDIR "${CMAKE_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin")
+set(LLVM_LIBRARY_OUTPUT_INTDIR "${CMAKE_BINARY_DIR}/${CMAKE_CFG_INTDIR}/lib")
+
+include_directories("${LLVM_SRC_DIR}/include"
+ "${LLVM_SRC_DIR}/tools/clang/include"
+ "${LLVM_BUILD_DIR}/include"
+ "${LLVM_BUILD_DIR}/tools/clang/include")
+
+link_directories("${LLVM_SRC_DIR}/lib"
+ "${LLVM_SRC_DIR}/tools/clang/lib"
+ "${LLVM_BUILD_DIR}/lib"
+ "${LLVM_BUILD_DIR}/tools/clang/lib")
+
+# cr_add_test(
+# name
+# testprog
+# arguments...
+# )
+function(cr_add_test name testprog)
+ add_test(NAME ${name} COMMAND ${testprog} ${ARGN})
+ add_dependencies(check-all ${name})
+endfunction(cr_add_test)
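+
+# For example (hypothetical test name and command):
+#   cr_add_test(my_plugin_test ${CMAKE_CURRENT_SOURCE_DIR}/tests/test.sh)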
+
+# Tests for all enabled tools can be run by building this target.
+add_custom_target(check-all COMMAND ${CMAKE_CTEST_COMMAND} -V)
+
+foreach(tool ${CHROMIUM_TOOLS})
+ add_subdirectory(${tool})
+endforeach(tool)
diff --git a/tools/clang/OWNERS b/tools/clang/OWNERS
new file mode 100644
index 0000000..d86ef94
--- /dev/null
+++ b/tools/clang/OWNERS
@@ -0,0 +1,2 @@
+hans@chromium.org
+thakis@chromium.org
diff --git a/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp b/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp
new file mode 100644
index 0000000..8658b04
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp
@@ -0,0 +1,1884 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This clang plugin checks various invariants of the Blink garbage
+// collection infrastructure.
+//
+// Errors are described at:
+// http://www.chromium.org/developers/blink-gc-plugin-errors
+
+#include "Config.h"
+#include "JsonWriter.h"
+#include "RecordInfo.h"
+
+#include "clang/AST/AST.h"
+#include "clang/AST/ASTConsumer.h"
+#include "clang/AST/RecursiveASTVisitor.h"
+#include "clang/Frontend/CompilerInstance.h"
+#include "clang/Frontend/FrontendPluginRegistry.h"
+
+using namespace clang;
+using std::string;
+
+namespace {
+
+const char kClassMustLeftMostlyDeriveGC[] =
+ "[blink-gc] Class %0 must derive its GC base in the left-most position.";
+
+const char kClassRequiresTraceMethod[] =
+ "[blink-gc] Class %0 requires a trace method.";
+
+const char kBaseRequiresTracing[] =
+ "[blink-gc] Base class %0 of derived class %1 requires tracing.";
+
+const char kBaseRequiresTracingNote[] =
+ "[blink-gc] Untraced base class %0 declared here:";
+
+const char kFieldsRequireTracing[] =
+ "[blink-gc] Class %0 has untraced fields that require tracing.";
+
+const char kFieldRequiresTracingNote[] =
+ "[blink-gc] Untraced field %0 declared here:";
+
+const char kClassContainsInvalidFields[] =
+ "[blink-gc] Class %0 contains invalid fields.";
+
+const char kClassContainsGCRoot[] =
+ "[blink-gc] Class %0 contains GC root in field %1.";
+
+const char kClassRequiresFinalization[] =
+ "[blink-gc] Class %0 requires finalization.";
+
+const char kClassDoesNotRequireFinalization[] =
+ "[blink-gc] Class %0 may not require finalization.";
+
+const char kFinalizerAccessesFinalizedField[] =
+ "[blink-gc] Finalizer %0 accesses potentially finalized field %1.";
+
+const char kRawPtrToGCManagedClassNote[] =
+ "[blink-gc] Raw pointer field %0 to a GC managed class declared here:";
+
+const char kRefPtrToGCManagedClassNote[] =
+ "[blink-gc] RefPtr field %0 to a GC managed class declared here:";
+
+const char kOwnPtrToGCManagedClassNote[] =
+ "[blink-gc] OwnPtr field %0 to a GC managed class declared here:";
+
+const char kStackAllocatedFieldNote[] =
+ "[blink-gc] Stack-allocated field %0 declared here:";
+
+const char kMemberInUnmanagedClassNote[] =
+ "[blink-gc] Member field %0 in unmanaged class declared here:";
+
+const char kPartObjectToGCDerivedClassNote[] =
+ "[blink-gc] Part-object field %0 to a GC derived class declared here:";
+
+const char kPartObjectContainsGCRootNote[] =
+ "[blink-gc] Field %0 with embedded GC root in %1 declared here:";
+
+const char kFieldContainsGCRootNote[] =
+ "[blink-gc] Field %0 defining a GC root declared here:";
+
+const char kOverriddenNonVirtualTrace[] =
+ "[blink-gc] Class %0 overrides non-virtual trace of base class %1.";
+
+const char kOverriddenNonVirtualTraceNote[] =
+ "[blink-gc] Non-virtual trace method declared here:";
+
+const char kMissingTraceDispatchMethod[] =
+ "[blink-gc] Class %0 is missing manual trace dispatch.";
+
+const char kMissingFinalizeDispatchMethod[] =
+ "[blink-gc] Class %0 is missing manual finalize dispatch.";
+
+const char kVirtualAndManualDispatch[] =
+ "[blink-gc] Class %0 contains or inherits virtual methods"
+ " but implements manual dispatching.";
+
+const char kMissingTraceDispatch[] =
+ "[blink-gc] Missing dispatch to class %0 in manual trace dispatch.";
+
+const char kMissingFinalizeDispatch[] =
+ "[blink-gc] Missing dispatch to class %0 in manual finalize dispatch.";
+
+const char kFinalizedFieldNote[] =
+ "[blink-gc] Potentially finalized field %0 declared here:";
+
+const char kUserDeclaredDestructorNote[] =
+ "[blink-gc] User-declared destructor declared here:";
+
+const char kUserDeclaredFinalizerNote[] =
+ "[blink-gc] User-declared finalizer declared here:";
+
+const char kBaseRequiresFinalizationNote[] =
+ "[blink-gc] Base class %0 requiring finalization declared here:";
+
+const char kFieldRequiresFinalizationNote[] =
+ "[blink-gc] Field %0 requiring finalization declared here:";
+
+const char kManualDispatchMethodNote[] =
+ "[blink-gc] Manual dispatch %0 declared here:";
+
+const char kDerivesNonStackAllocated[] =
+ "[blink-gc] Stack-allocated class %0 derives class %1"
+ " which is not stack allocated.";
+
+const char kClassOverridesNew[] =
+ "[blink-gc] Garbage collected class %0"
+ " is not permitted to override its new operator.";
+
+const char kClassDeclaresPureVirtualTrace[] =
+ "[blink-gc] Garbage collected class %0"
+ " is not permitted to declare a pure-virtual trace method.";
+
+const char kLeftMostBaseMustBePolymorphic[] =
+ "[blink-gc] Left-most base class %0 of derived class %1"
+ " must be polymorphic.";
+
+const char kBaseClassMustDeclareVirtualTrace[] =
+ "[blink-gc] Left-most base class %0 of derived class %1"
+ " must define a virtual trace method.";
+
+struct BlinkGCPluginOptions {
+ BlinkGCPluginOptions()
+ : enable_oilpan(false)
+ , dump_graph(false)
+ , warn_raw_ptr(false)
+ , warn_unneeded_finalizer(false) {}
+ bool enable_oilpan;
+ bool dump_graph;
+ bool warn_raw_ptr;
+ bool warn_unneeded_finalizer;
+ std::set<std::string> ignored_classes;
+ std::set<std::string> checked_namespaces;
+ std::vector<std::string> ignored_directories;
+};
+
+typedef std::vector<CXXRecordDecl*> RecordVector;
+typedef std::vector<CXXMethodDecl*> MethodVector;
+
+// Test if a template specialization is an instantiation.
+static bool IsTemplateInstantiation(CXXRecordDecl* record) {
+ ClassTemplateSpecializationDecl* spec =
+ dyn_cast<ClassTemplateSpecializationDecl>(record);
+ if (!spec)
+ return false;
+ switch (spec->getTemplateSpecializationKind()) {
+ case TSK_ImplicitInstantiation:
+ case TSK_ExplicitInstantiationDefinition:
+ return true;
+ case TSK_Undeclared:
+ case TSK_ExplicitSpecialization:
+ return false;
+ // TODO: unsupported cases.
+ case TSK_ExplicitInstantiationDeclaration:
+ return false;
+ }
+ assert(false && "Unknown template specialization kind");
+}
+
+// This visitor collects the entry points for the checker.
+class CollectVisitor : public RecursiveASTVisitor<CollectVisitor> {
+ public:
+ CollectVisitor() {}
+
+ RecordVector& record_decls() { return record_decls_; }
+ MethodVector& trace_decls() { return trace_decls_; }
+
+ bool shouldVisitTemplateInstantiations() { return false; }
+
+ // Collect record declarations, including nested declarations.
+ bool VisitCXXRecordDecl(CXXRecordDecl* record) {
+ if (record->hasDefinition() && record->isCompleteDefinition())
+ record_decls_.push_back(record);
+ return true;
+ }
+
+ // Collect tracing method definitions, but don't traverse method bodies.
+ bool TraverseCXXMethodDecl(CXXMethodDecl* method) {
+ if (method->isThisDeclarationADefinition() && Config::IsTraceMethod(method))
+ trace_decls_.push_back(method);
+ return true;
+ }
+
+ private:
+ RecordVector record_decls_;
+ MethodVector trace_decls_;
+};
+
+// This visitor checks that a finalizer method does not have invalid access to
+// fields that are potentially finalized. A potentially finalized field is
+// either a Member, a heap-allocated collection or an off-heap collection that
+// contains Members. Invalid uses are currently identified as passing the field
+// as the argument of a procedure call or using the -> or [] operators on it.
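+// For example (illustrative class and field names), both statements below
+// would be flagged inside a finalizer:
+//   ~MyGarbageCollectedThing() {
+//     m_member->detach();    // operator -> on a potentially finalized field.
+//     notify(m_heapVector);  // potentially finalized field passed to a call.
+//   }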
+class CheckFinalizerVisitor
+ : public RecursiveASTVisitor<CheckFinalizerVisitor> {
+ private:
+ // Simple visitor to determine if the content of a field might be collected
+ // during finalization.
+ class MightBeCollectedVisitor : public EdgeVisitor {
+ public:
+ MightBeCollectedVisitor() : might_be_collected_(false) {}
+ bool might_be_collected() { return might_be_collected_; }
+ void VisitMember(Member* edge) override { might_be_collected_ = true; }
+ void VisitCollection(Collection* edge) override {
+ if (edge->on_heap()) {
+ might_be_collected_ = !edge->is_root();
+ } else {
+ edge->AcceptMembers(this);
+ }
+ }
+
+ private:
+ bool might_be_collected_;
+ };
+
+ public:
+ typedef std::vector<std::pair<MemberExpr*, FieldPoint*> > Errors;
+
+ CheckFinalizerVisitor(RecordCache* cache)
+ : blacklist_context_(false), cache_(cache) {}
+
+ Errors& finalized_fields() { return finalized_fields_; }
+
+ bool WalkUpFromCXXOperatorCallExpr(CXXOperatorCallExpr* expr) {
+ // Only continue the walk-up if the operator is a blacklisted one.
+ switch (expr->getOperator()) {
+ case OO_Arrow:
+ case OO_Subscript:
+ this->WalkUpFromCallExpr(expr);
+ default:
+ return true;
+ }
+ }
+
+ // We consider all non-operator calls to be blacklisted contexts.
+ bool WalkUpFromCallExpr(CallExpr* expr) {
+ bool prev_blacklist_context = blacklist_context_;
+ blacklist_context_ = true;
+ for (size_t i = 0; i < expr->getNumArgs(); ++i)
+ this->TraverseStmt(expr->getArg(i));
+ blacklist_context_ = prev_blacklist_context;
+ return true;
+ }
+
+ bool VisitMemberExpr(MemberExpr* member) {
+ FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl());
+ if (!field)
+ return true;
+
+ RecordInfo* info = cache_->Lookup(field->getParent());
+ if (!info)
+ return true;
+
+ RecordInfo::Fields::iterator it = info->GetFields().find(field);
+ if (it == info->GetFields().end())
+ return true;
+
+ if (blacklist_context_ && MightBeCollected(&it->second))
+ finalized_fields_.push_back(std::make_pair(member, &it->second));
+ return true;
+ }
+
+ bool MightBeCollected(FieldPoint* point) {
+ MightBeCollectedVisitor visitor;
+ point->edge()->Accept(&visitor);
+ return visitor.might_be_collected();
+ }
+
+ private:
+ bool blacklist_context_;
+ Errors finalized_fields_;
+ RecordCache* cache_;
+};
+
+// This visitor checks that a method contains, within its body, a call to a
+// method on the provided receiver class. This is used to check manual
+// dispatching for trace and finalize methods.
+class CheckDispatchVisitor : public RecursiveASTVisitor<CheckDispatchVisitor> {
+ public:
+ CheckDispatchVisitor(RecordInfo* receiver)
+ : receiver_(receiver), dispatched_to_receiver_(false) {}
+
+ bool dispatched_to_receiver() { return dispatched_to_receiver_; }
+
+ bool VisitMemberExpr(MemberExpr* member) {
+ if (CXXMethodDecl* fn = dyn_cast<CXXMethodDecl>(member->getMemberDecl())) {
+ if (fn->getParent() == receiver_->record())
+ dispatched_to_receiver_ = true;
+ }
+ return true;
+ }
+
+ private:
+ RecordInfo* receiver_;
+ bool dispatched_to_receiver_;
+};
+
+// This visitor checks a tracing method by traversing its body.
+// - A member field is considered traced if it is referenced in the body.
+// - A base is traced if a base-qualified call to a trace method is found.
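+//
+// For example (illustrative), in
+//   void Derived::trace(Visitor* visitor) {
+//     visitor->trace(m_child);  // field m_child counts as traced.
+//     Base::trace(visitor);     // base class Base counts as traced.
+//   }
+// both the field and the base are considered properly traced.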
+class CheckTraceVisitor : public RecursiveASTVisitor<CheckTraceVisitor> {
+ public:
+ CheckTraceVisitor(CXXMethodDecl* trace, RecordInfo* info)
+ : trace_(trace), info_(info) {}
+
+ bool VisitMemberExpr(MemberExpr* member) {
+ // In weak callbacks, consider any occurrence as a correct usage.
+ // TODO: We really want to require that isAlive is checked on manually
+ // processed weak fields.
+ if (IsWeakCallback()) {
+ if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl()))
+ FoundField(field);
+ }
+ return true;
+ }
+
+ bool VisitCallExpr(CallExpr* call) {
+ // In weak callbacks we don't check calls (see VisitMemberExpr).
+ if (IsWeakCallback())
+ return true;
+
+ Expr* callee = call->getCallee();
+
+ // Trace calls from a templated derived class result in a
+ // DependentScopeMemberExpr because the concrete trace call depends on the
+ // instantiation of any shared template parameters. In this case the call is
+ // "unresolved" and we resort to comparing the syntactic type names.
+ if (CXXDependentScopeMemberExpr* expr =
+ dyn_cast<CXXDependentScopeMemberExpr>(callee)) {
+ CheckCXXDependentScopeMemberExpr(call, expr);
+ return true;
+ }
+
+ // A tracing call will have either a |visitor| or a |m_field| argument.
+ // A registerWeakMembers call will have a |this| argument.
+ if (call->getNumArgs() != 1)
+ return true;
+ Expr* arg = call->getArg(0);
+
+ if (UnresolvedMemberExpr* expr = dyn_cast<UnresolvedMemberExpr>(callee)) {
+ // If we find a call to registerWeakMembers which is unresolved we
+ // unsoundly consider all weak members as traced.
+ // TODO: Find out how to validate weak member tracing for an unresolved call.
+ if (expr->getMemberName().getAsString() == kRegisterWeakMembersName) {
+ for (RecordInfo::Fields::iterator it = info_->GetFields().begin();
+ it != info_->GetFields().end();
+ ++it) {
+ if (it->second.edge()->IsWeakMember())
+ it->second.MarkTraced();
+ }
+ }
+
+ QualType base = expr->getBaseType();
+ if (!base->isPointerType())
+ return true;
+ CXXRecordDecl* decl = base->getPointeeType()->getAsCXXRecordDecl();
+ if (decl)
+ CheckTraceFieldCall(expr->getMemberName().getAsString(), decl, arg);
+ return true;
+ }
+
+ if (CXXMemberCallExpr* expr = dyn_cast<CXXMemberCallExpr>(call)) {
+ if (CheckTraceFieldCall(expr) || CheckRegisterWeakMembers(expr))
+ return true;
+ }
+
+ CheckTraceBaseCall(call);
+ return true;
+ }
+
+ private:
+
+ CXXRecordDecl* GetDependentTemplatedDecl(CXXDependentScopeMemberExpr* expr) {
+ NestedNameSpecifier* qual = expr->getQualifier();
+ if (!qual)
+ return 0;
+
+ const Type* type = qual->getAsType();
+ if (!type)
+ return 0;
+
+ const TemplateSpecializationType* tmpl_type =
+ type->getAs<TemplateSpecializationType>();
+ if (!tmpl_type)
+ return 0;
+
+ TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
+ if (!tmpl_decl)
+ return 0;
+
+ return dyn_cast<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
+ }
+
+ void CheckCXXDependentScopeMemberExpr(CallExpr* call,
+ CXXDependentScopeMemberExpr* expr) {
+ string fn_name = expr->getMember().getAsString();
+ CXXRecordDecl* tmpl = GetDependentTemplatedDecl(expr);
+ if (!tmpl)
+ return;
+
+ // Check for Super<T>::trace(visitor)
+ if (call->getNumArgs() == 1 && fn_name == trace_->getName()) {
+ RecordInfo::Bases::iterator it = info_->GetBases().begin();
+ for (; it != info_->GetBases().end(); ++it) {
+ if (it->first->getName() == tmpl->getName())
+ it->second.MarkTraced();
+ }
+ return;
+ }
+
+ // Check for TraceIfNeeded<T>::trace(visitor, &field)
+ if (call->getNumArgs() == 2 && fn_name == kTraceName &&
+ tmpl->getName() == kTraceIfNeededName) {
+ FindFieldVisitor finder;
+ finder.TraverseStmt(call->getArg(1));
+ if (finder.field())
+ FoundField(finder.field());
+ }
+ }
+
+ bool CheckTraceBaseCall(CallExpr* call) {
+ MemberExpr* callee = dyn_cast<MemberExpr>(call->getCallee());
+ if (!callee)
+ return false;
+
+ FunctionDecl* fn = dyn_cast<FunctionDecl>(callee->getMemberDecl());
+ if (!fn || !Config::IsTraceMethod(fn))
+ return false;
+
+ // Currently, a manually dispatched class cannot have mixin bases (having
+ // one would add a vtable which we explicitly check against). This means
+ // that we can only make calls to a trace method of the same name. Revisit
+ // this if our mixin/vtable assumption changes.
+ if (fn->getName() != trace_->getName())
+ return false;
+
+ CXXRecordDecl* decl = 0;
+ if (callee && callee->hasQualifier()) {
+ if (const Type* type = callee->getQualifier()->getAsType())
+ decl = type->getAsCXXRecordDecl();
+ }
+ if (!decl)
+ return false;
+
+ RecordInfo::Bases::iterator it = info_->GetBases().find(decl);
+ if (it != info_->GetBases().end()) {
+ it->second.MarkTraced();
+ }
+
+ return true;
+ }
+
+ bool CheckTraceFieldCall(CXXMemberCallExpr* call) {
+ return CheckTraceFieldCall(call->getMethodDecl()->getNameAsString(),
+ call->getRecordDecl(),
+ call->getArg(0));
+ }
+
+ bool CheckTraceFieldCall(string name, CXXRecordDecl* callee, Expr* arg) {
+ if (name != kTraceName || !Config::IsVisitor(callee->getName()))
+ return false;
+
+ FindFieldVisitor finder;
+ finder.TraverseStmt(arg);
+ if (finder.field())
+ FoundField(finder.field());
+
+ return true;
+ }
+
+ bool CheckRegisterWeakMembers(CXXMemberCallExpr* call) {
+ CXXMethodDecl* fn = call->getMethodDecl();
+ if (fn->getName() != kRegisterWeakMembersName)
+ return false;
+
+ if (fn->isTemplateInstantiation()) {
+ const TemplateArgumentList& args =
+ *fn->getTemplateSpecializationInfo()->TemplateArguments;
+ // The second template argument is the callback method.
+ if (args.size() > 1 &&
+ args[1].getKind() == TemplateArgument::Declaration) {
+ if (FunctionDecl* callback =
+ dyn_cast<FunctionDecl>(args[1].getAsDecl())) {
+ if (callback->hasBody()) {
+ CheckTraceVisitor nested_visitor(info_);
+ nested_visitor.TraverseStmt(callback->getBody());
+ }
+ }
+ }
+ }
+ return true;
+ }
+
+ class FindFieldVisitor : public RecursiveASTVisitor<FindFieldVisitor> {
+ public:
+ FindFieldVisitor() : member_(0), field_(0) {}
+ MemberExpr* member() const { return member_; }
+ FieldDecl* field() const { return field_; }
+ bool TraverseMemberExpr(MemberExpr* member) {
+ if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl())) {
+ member_ = member;
+ field_ = field;
+ return false;
+ }
+ return true;
+ }
+ private:
+ MemberExpr* member_;
+ FieldDecl* field_;
+ };
+
+ // Nested checking for weak callbacks.
+ CheckTraceVisitor(RecordInfo* info) : trace_(0), info_(info) {}
+
+ bool IsWeakCallback() { return !trace_; }
+
+ void MarkTraced(RecordInfo::Fields::iterator it) {
+ // In a weak callback we can't mark strong fields as traced.
+ if (IsWeakCallback() && !it->second.edge()->IsWeakMember())
+ return;
+ it->second.MarkTraced();
+ }
+
+ void FoundField(FieldDecl* field) {
+ if (IsTemplateInstantiation(info_->record())) {
+ // Pointer equality on fields does not work for template instantiations.
+ // The trace method refers to fields of the template definition which
+ // are different from the instantiated fields that need to be traced.
+ const string& name = field->getNameAsString();
+ for (RecordInfo::Fields::iterator it = info_->GetFields().begin();
+ it != info_->GetFields().end();
+ ++it) {
+ if (it->first->getNameAsString() == name) {
+ MarkTraced(it);
+ break;
+ }
+ }
+ } else {
+ RecordInfo::Fields::iterator it = info_->GetFields().find(field);
+ if (it != info_->GetFields().end())
+ MarkTraced(it);
+ }
+ }
+
+ CXXMethodDecl* trace_;
+ RecordInfo* info_;
+};
+
+// This visitor checks that the fields of a class and the fields of
+// its part objects don't define GC roots.
+class CheckGCRootsVisitor : public RecursiveEdgeVisitor {
+ public:
+ typedef std::vector<FieldPoint*> RootPath;
+ typedef std::vector<RootPath> Errors;
+
+ CheckGCRootsVisitor() {}
+
+ Errors& gc_roots() { return gc_roots_; }
+
+ bool ContainsGCRoots(RecordInfo* info) {
+ for (RecordInfo::Fields::iterator it = info->GetFields().begin();
+ it != info->GetFields().end();
+ ++it) {
+ current_.push_back(&it->second);
+ it->second.edge()->Accept(this);
+ current_.pop_back();
+ }
+ return !gc_roots_.empty();
+ }
+
+ void VisitValue(Value* edge) override {
+ // TODO: what should we do to check unions?
+ if (edge->value()->record()->isUnion())
+ return;
+
+ // If the value is a part object, then continue checking for roots.
+ for (Context::iterator it = context().begin();
+ it != context().end();
+ ++it) {
+ if (!(*it)->IsCollection())
+ return;
+ }
+ ContainsGCRoots(edge->value());
+ }
+
+ void VisitPersistent(Persistent* edge) override {
+ gc_roots_.push_back(current_);
+ }
+
+ void AtCollection(Collection* edge) override {
+ if (edge->is_root())
+ gc_roots_.push_back(current_);
+ }
+
+ protected:
+ RootPath current_;
+ Errors gc_roots_;
+};
+
+// This visitor checks that the fields of a class are "well formed".
+// - OwnPtr, RefPtr and RawPtr must not point to GC derived types.
+// - Part objects must not be GC derived types.
+// - An on-heap class must never contain GC roots.
+// - Only stack-allocated types may point to stack-allocated types.
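+//
+// For example (illustrative types and field names), fields such as
+//   OwnPtr<SomeGarbageCollected> m_ptr;  // smart pointer to a GC derived type
+//   SomeGarbageCollected m_partObject;   // part object of a GC derived type
+// would be reported as invalid.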
+class CheckFieldsVisitor : public RecursiveEdgeVisitor {
+ public:
+
+ enum Error {
+ kRawPtrToGCManaged,
+ kRawPtrToGCManagedWarning,
+ kRefPtrToGCManaged,
+ kOwnPtrToGCManaged,
+ kMemberInUnmanaged,
+ kPtrFromHeapToStack,
+ kGCDerivedPartObject
+ };
+
+ typedef std::vector<std::pair<FieldPoint*, Error> > Errors;
+
+ CheckFieldsVisitor(const BlinkGCPluginOptions& options)
+ : options_(options), current_(0), stack_allocated_host_(false) {}
+
+ Errors& invalid_fields() { return invalid_fields_; }
+
+ bool ContainsInvalidFields(RecordInfo* info) {
+ stack_allocated_host_ = info->IsStackAllocated();
+ managed_host_ = stack_allocated_host_ ||
+ info->IsGCAllocated() ||
+ info->IsNonNewable() ||
+ info->IsOnlyPlacementNewable();
+ for (RecordInfo::Fields::iterator it = info->GetFields().begin();
+ it != info->GetFields().end();
+ ++it) {
+ context().clear();
+ current_ = &it->second;
+ current_->edge()->Accept(this);
+ }
+ return !invalid_fields_.empty();
+ }
+
+ void AtMember(Member* edge) override {
+ if (managed_host_)
+ return;
+ // A member is allowed to appear in the context of a root.
+ for (Context::iterator it = context().begin();
+ it != context().end();
+ ++it) {
+ if ((*it)->Kind() == Edge::kRoot)
+ return;
+ }
+ invalid_fields_.push_back(std::make_pair(current_, kMemberInUnmanaged));
+ }
+
+ void AtValue(Value* edge) override {
+ // TODO: what should we do to check unions?
+ if (edge->value()->record()->isUnion())
+ return;
+
+ if (!stack_allocated_host_ && edge->value()->IsStackAllocated()) {
+ invalid_fields_.push_back(std::make_pair(current_, kPtrFromHeapToStack));
+ return;
+ }
+
+ if (!Parent() &&
+ edge->value()->IsGCDerived() &&
+ !edge->value()->IsGCMixin()) {
+ invalid_fields_.push_back(std::make_pair(current_, kGCDerivedPartObject));
+ return;
+ }
+
+ if (!Parent() || !edge->value()->IsGCAllocated())
+ return;
+
+ // In transition mode, disallow OwnPtr<T>, RawPtr<T> to GC allocated T's,
+ // also disallow T* in stack-allocated types.
+ if (options_.enable_oilpan) {
+ if (Parent()->IsOwnPtr() ||
+ Parent()->IsRawPtrClass() ||
+ (stack_allocated_host_ && Parent()->IsRawPtr())) {
+ invalid_fields_.push_back(std::make_pair(
+ current_, InvalidSmartPtr(Parent())));
+ return;
+ }
+ if (options_.warn_raw_ptr && Parent()->IsRawPtr()) {
+ invalid_fields_.push_back(std::make_pair(
+ current_, kRawPtrToGCManagedWarning));
+ }
+ return;
+ }
+
+ if (Parent()->IsRawPtr() || Parent()->IsRefPtr() || Parent()->IsOwnPtr()) {
+ invalid_fields_.push_back(std::make_pair(
+ current_, InvalidSmartPtr(Parent())));
+ return;
+ }
+ }
+
+ void AtCollection(Collection* edge) override {
+ if (edge->on_heap() && Parent() && Parent()->IsOwnPtr())
+ invalid_fields_.push_back(std::make_pair(current_, kOwnPtrToGCManaged));
+ }
+
+ private:
+ Error InvalidSmartPtr(Edge* ptr) {
+ if (ptr->IsRawPtr())
+ return kRawPtrToGCManaged;
+ if (ptr->IsRefPtr())
+ return kRefPtrToGCManaged;
+ if (ptr->IsOwnPtr())
+ return kOwnPtrToGCManaged;
+ assert(false && "Unknown smart pointer kind");
+ }
+
+ const BlinkGCPluginOptions& options_;
+ FieldPoint* current_;
+ bool stack_allocated_host_;
+ bool managed_host_;
+ Errors invalid_fields_;
+};
+
+class EmptyStmtVisitor
+ : public RecursiveASTVisitor<EmptyStmtVisitor> {
+public:
+ static bool isEmpty(Stmt* stmt) {
+ EmptyStmtVisitor visitor;
+ visitor.TraverseStmt(stmt);
+ return visitor.empty_;
+ }
+
+ bool WalkUpFromCompoundStmt(CompoundStmt* stmt) {
+ empty_ = stmt->body_empty();
+ return false;
+ }
+ bool VisitStmt(Stmt*) {
+ empty_ = false;
+ return false;
+ }
+private:
+ EmptyStmtVisitor() : empty_(true) {}
+ bool empty_;
+};
+
+// Main class containing checks for various invariants of the Blink
+// garbage collection infrastructure.
+class BlinkGCPluginConsumer : public ASTConsumer {
+ public:
+ BlinkGCPluginConsumer(CompilerInstance& instance,
+ const BlinkGCPluginOptions& options)
+ : instance_(instance),
+ diagnostic_(instance.getDiagnostics()),
+ options_(options),
+ json_(0) {
+
+ // Only check structures in the blink and WebKit namespaces.
+ options_.checked_namespaces.insert("blink");
+ options_.checked_namespaces.insert("WebKit");
+
+ // Ignore GC implementation files.
+ options_.ignored_directories.push_back("/heap/");
+
+ // Register warning/error messages.
+ diag_class_must_left_mostly_derive_gc_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kClassMustLeftMostlyDeriveGC);
+ diag_class_requires_trace_method_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kClassRequiresTraceMethod);
+ diag_base_requires_tracing_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kBaseRequiresTracing);
+ diag_fields_require_tracing_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kFieldsRequireTracing);
+ diag_class_contains_invalid_fields_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kClassContainsInvalidFields);
+ diag_class_contains_invalid_fields_warning_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Warning, kClassContainsInvalidFields);
+ diag_class_contains_gc_root_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kClassContainsGCRoot);
+ diag_class_requires_finalization_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kClassRequiresFinalization);
+ diag_class_does_not_require_finalization_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Warning, kClassDoesNotRequireFinalization);
+ diag_finalizer_accesses_finalized_field_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kFinalizerAccessesFinalizedField);
+ diag_overridden_non_virtual_trace_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kOverriddenNonVirtualTrace);
+ diag_missing_trace_dispatch_method_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kMissingTraceDispatchMethod);
+ diag_missing_finalize_dispatch_method_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kMissingFinalizeDispatchMethod);
+ diag_virtual_and_manual_dispatch_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kVirtualAndManualDispatch);
+ diag_missing_trace_dispatch_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kMissingTraceDispatch);
+ diag_missing_finalize_dispatch_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kMissingFinalizeDispatch);
+ diag_derives_non_stack_allocated_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kDerivesNonStackAllocated);
+ diag_class_overrides_new_ =
+ diagnostic_.getCustomDiagID(getErrorLevel(), kClassOverridesNew);
+ diag_class_declares_pure_virtual_trace_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kClassDeclaresPureVirtualTrace);
+ diag_left_most_base_must_be_polymorphic_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kLeftMostBaseMustBePolymorphic);
+ diag_base_class_must_declare_virtual_trace_ = diagnostic_.getCustomDiagID(
+ getErrorLevel(), kBaseClassMustDeclareVirtualTrace);
+
+ // Register note messages.
+ diag_base_requires_tracing_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kBaseRequiresTracingNote);
+ diag_field_requires_tracing_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kFieldRequiresTracingNote);
+ diag_raw_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kRawPtrToGCManagedClassNote);
+ diag_ref_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kRefPtrToGCManagedClassNote);
+ diag_own_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kOwnPtrToGCManagedClassNote);
+ diag_stack_allocated_field_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kStackAllocatedFieldNote);
+ diag_member_in_unmanaged_class_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kMemberInUnmanagedClassNote);
+ diag_part_object_to_gc_derived_class_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kPartObjectToGCDerivedClassNote);
+ diag_part_object_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kPartObjectContainsGCRootNote);
+ diag_field_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kFieldContainsGCRootNote);
+ diag_finalized_field_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kFinalizedFieldNote);
+ diag_user_declared_destructor_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kUserDeclaredDestructorNote);
+ diag_user_declared_finalizer_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kUserDeclaredFinalizerNote);
+ diag_base_requires_finalization_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kBaseRequiresFinalizationNote);
+ diag_field_requires_finalization_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kFieldRequiresFinalizationNote);
+ diag_overridden_non_virtual_trace_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kOverriddenNonVirtualTraceNote);
+ diag_manual_dispatch_method_note_ = diagnostic_.getCustomDiagID(
+ DiagnosticsEngine::Note, kManualDispatchMethodNote);
+ }
+
+ void HandleTranslationUnit(ASTContext& context) override {
+ CollectVisitor visitor;
+ visitor.TraverseDecl(context.getTranslationUnitDecl());
+
+ if (options_.dump_graph) {
+ std::error_code err;
+ // TODO: Make createDefaultOutputFile or a shorter createOutputFile work.
+ json_ = JsonWriter::from(instance_.createOutputFile(
+ "", // OutputPath
+ err, // Errors
+ true, // Binary
+ true, // RemoveFileOnSignal
+ instance_.getFrontendOpts().OutputFile, // BaseInput
+ "graph.json", // Extension
+ false, // UseTemporary
+ false, // CreateMissingDirectories
+ 0, // ResultPathName
+ 0)); // TempPathName
+ if (!err && json_) {
+ json_->OpenList();
+ } else {
+ json_ = 0;
+ llvm::errs()
+ << "[blink-gc] "
+ << "Failed to create an output file for the object graph.\n";
+ }
+ }
+
+ for (RecordVector::iterator it = visitor.record_decls().begin();
+ it != visitor.record_decls().end();
+ ++it) {
+ CheckRecord(cache_.Lookup(*it));
+ }
+
+ for (MethodVector::iterator it = visitor.trace_decls().begin();
+ it != visitor.trace_decls().end();
+ ++it) {
+ CheckTracingMethod(*it);
+ }
+
+ if (json_) {
+ json_->CloseList();
+ delete json_;
+ json_ = 0;
+ }
+ }
+
+ // Main entry for checking a record declaration.
+ void CheckRecord(RecordInfo* info) {
+ if (IsIgnored(info))
+ return;
+
+ CXXRecordDecl* record = info->record();
+
+ // TODO: what should we do to check unions?
+ if (record->isUnion())
+ return;
+
+ // If this is the primary template declaration, check its specializations.
+ if (record->isThisDeclarationADefinition() &&
+ record->getDescribedClassTemplate()) {
+ ClassTemplateDecl* tmpl = record->getDescribedClassTemplate();
+ for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
+ it != tmpl->spec_end();
+ ++it) {
+ CheckClass(cache_.Lookup(*it));
+ }
+ return;
+ }
+
+ CheckClass(info);
+ }
+
+ // Check a class-like object (eg, class, specialization, instantiation).
+ void CheckClass(RecordInfo* info) {
+ if (!info)
+ return;
+
+ // Check consistency of stack-allocated hierarchies.
+ if (info->IsStackAllocated()) {
+ for (RecordInfo::Bases::iterator it = info->GetBases().begin();
+ it != info->GetBases().end();
+ ++it) {
+ if (!it->second.info()->IsStackAllocated())
+ ReportDerivesNonStackAllocated(info, &it->second);
+ }
+ }
+
+ if (CXXMethodDecl* trace = info->GetTraceMethod()) {
+ if (trace->isPure())
+ ReportClassDeclaresPureVirtualTrace(info, trace);
+ } else if (info->RequiresTraceMethod()) {
+ ReportClassRequiresTraceMethod(info);
+ }
+
+ // Check polymorphic classes that are GC-derived or have a trace method.
+ if (info->record()->hasDefinition() && info->record()->isPolymorphic()) {
+ // TODO: Check classes that inherit a trace method.
+ CXXMethodDecl* trace = info->GetTraceMethod();
+ if (trace || info->IsGCDerived())
+ CheckPolymorphicClass(info, trace);
+ }
+
+ {
+ CheckFieldsVisitor visitor(options_);
+ if (visitor.ContainsInvalidFields(info))
+ ReportClassContainsInvalidFields(info, &visitor.invalid_fields());
+ }
+
+ if (info->IsGCDerived()) {
+
+ if (!info->IsGCMixin()) {
+ CheckLeftMostDerived(info);
+ CheckDispatch(info);
+ if (CXXMethodDecl* newop = info->DeclaresNewOperator())
+ ReportClassOverridesNew(info, newop);
+ }
+
+ {
+ CheckGCRootsVisitor visitor;
+ if (visitor.ContainsGCRoots(info))
+ ReportClassContainsGCRoots(info, &visitor.gc_roots());
+ }
+
+ if (info->NeedsFinalization())
+ CheckFinalization(info);
+
+ if (options_.warn_unneeded_finalizer && info->IsGCFinalized())
+ CheckUnneededFinalization(info);
+ }
+
+ DumpClass(info);
+ }
+
+ CXXRecordDecl* GetDependentTemplatedDecl(const Type& type) {
+ const TemplateSpecializationType* tmpl_type =
+ type.getAs<TemplateSpecializationType>();
+ if (!tmpl_type)
+ return 0;
+
+ TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
+ if (!tmpl_decl)
+ return 0;
+
+ return dyn_cast<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
+ }
+
+ // The GC infrastructure assumes that if the vtable of a polymorphic
+ // base-class is not initialized for a given object (ie, it is partially
+ // initialized) then the object does not need to be traced. Thus, we must
+ // ensure that any polymorphic class with a trace method does not have any
+ // traceable fields that are initialized before we are sure that the vtable
+ // and the trace method are both defined. There are two cases that need to
+ // hold to satisfy that assumption:
+ //
+ // 1. If trace is virtual, then it must be defined in the left-most base.
+ // This ensures that if the vtable is initialized then it contains a pointer
+ // to the trace method.
+ //
+ // 2. If trace is non-virtual, then the trace method is defined and we must
+ // ensure that the left-most base defines a vtable. This ensures that the
+ // first thing to be initialized when constructing the object is the vtable
+ // itself.
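+ //
+ // For example (illustrative): if class B derives left-most from A and B's
+ // trace is virtual, condition (1) requires A to declare a virtual trace; if
+ // B's trace is non-virtual, condition (2) requires A to be polymorphic.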
+ void CheckPolymorphicClass(RecordInfo* info, CXXMethodDecl* trace) {
+ CXXRecordDecl* left_most = info->record();
+ CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
+ CXXRecordDecl* left_most_base = 0;
+ while (it != left_most->bases_end()) {
+ left_most_base = it->getType()->getAsCXXRecordDecl();
+ if (!left_most_base && it->getType()->isDependentType())
+ left_most_base = GetDependentTemplatedDecl(*it->getType());
+
+ // TODO: Find a way to correctly check actual instantiations
+ // for dependent types. The escape below will be hit, eg, when
+ // we have a primary template with no definition and
+ // specializations for each case (such as SupplementBase) in
+ // which case we don't succeed in checking the required
+ // properties.
+ if (!left_most_base || !left_most_base->hasDefinition())
+ return;
+
+ StringRef name = left_most_base->getName();
+ // We know GCMixin base defines virtual trace.
+ if (Config::IsGCMixinBase(name))
+ return;
+
+ // Stop with the left-most prior to a safe polymorphic base (a safe base
+ // is non-polymorphic and contains no fields).
+ if (Config::IsSafePolymorphicBase(name))
+ break;
+
+ left_most = left_most_base;
+ it = left_most->bases_begin();
+ }
+
+ if (RecordInfo* left_most_info = cache_.Lookup(left_most)) {
+
+ // Check condition (1):
+ if (trace && trace->isVirtual()) {
+ if (CXXMethodDecl* trace = left_most_info->GetTraceMethod()) {
+ if (trace->isVirtual())
+ return;
+ }
+ ReportBaseClassMustDeclareVirtualTrace(info, left_most);
+ return;
+ }
+
+ // Check condition (2):
+ if (DeclaresVirtualMethods(left_most))
+ return;
+ if (left_most_base) {
+ ++it; // Get the base next to the "safe polymorphic base"
+ if (it != left_most->bases_end()) {
+ if (CXXRecordDecl* next_base = it->getType()->getAsCXXRecordDecl()) {
+ if (CXXRecordDecl* next_left_most = GetLeftMostBase(next_base)) {
+ if (DeclaresVirtualMethods(next_left_most))
+ return;
+ ReportLeftMostBaseMustBePolymorphic(info, next_left_most);
+ return;
+ }
+ }
+ }
+ }
+ ReportLeftMostBaseMustBePolymorphic(info, left_most);
+ }
+ }
+
+ CXXRecordDecl* GetLeftMostBase(CXXRecordDecl* left_most) {
+ CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
+ while (it != left_most->bases_end()) {
+ if (it->getType()->isDependentType())
+ left_most = GetDependentTemplatedDecl(*it->getType());
+ else
+ left_most = it->getType()->getAsCXXRecordDecl();
+ if (!left_most || !left_most->hasDefinition())
+ return 0;
+ it = left_most->bases_begin();
+ }
+ return left_most;
+ }
+
+ bool DeclaresVirtualMethods(CXXRecordDecl* decl) {
+ CXXRecordDecl::method_iterator it = decl->method_begin();
+ for (; it != decl->method_end(); ++it)
+ if (it->isVirtual() && !it->isPure())
+ return true;
+ return false;
+ }
+
+ void CheckLeftMostDerived(RecordInfo* info) {
+ CXXRecordDecl* left_most = info->record();
+ CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
+ while (it != left_most->bases_end()) {
+ left_most = it->getType()->getAsCXXRecordDecl();
+ it = left_most->bases_begin();
+ }
+ if (!Config::IsGCBase(left_most->getName()))
+ ReportClassMustLeftMostlyDeriveGC(info);
+ }
+
+ void CheckDispatch(RecordInfo* info) {
+ bool finalized = info->IsGCFinalized();
+ CXXMethodDecl* trace_dispatch = info->GetTraceDispatchMethod();
+ CXXMethodDecl* finalize_dispatch = info->GetFinalizeDispatchMethod();
+ if (!trace_dispatch && !finalize_dispatch)
+ return;
+
+ CXXRecordDecl* base = trace_dispatch ? trace_dispatch->getParent()
+ : finalize_dispatch->getParent();
+
+ // Check that dispatch methods are defined at the base.
+ if (base == info->record()) {
+ if (!trace_dispatch)
+ ReportMissingTraceDispatchMethod(info);
+ if (finalized && !finalize_dispatch)
+ ReportMissingFinalizeDispatchMethod(info);
+ if (!finalized && finalize_dispatch) {
+ ReportClassRequiresFinalization(info);
+ NoteUserDeclaredFinalizer(finalize_dispatch);
+ }
+ }
+
+ // Check that classes implementing manual dispatch do not have vtables.
+ if (info->record()->isPolymorphic())
+ ReportVirtualAndManualDispatch(
+ info, trace_dispatch ? trace_dispatch : finalize_dispatch);
+
+ // If this is a non-abstract class check that it is dispatched to.
+ // TODO: Create a global variant of this local check. We can only check if
+ // the dispatch body is known in this compilation unit.
+ if (info->IsConsideredAbstract())
+ return;
+
+ const FunctionDecl* defn;
+
+ if (trace_dispatch && trace_dispatch->isDefined(defn)) {
+ CheckDispatchVisitor visitor(info);
+ visitor.TraverseStmt(defn->getBody());
+ if (!visitor.dispatched_to_receiver())
+ ReportMissingTraceDispatch(defn, info);
+ }
+
+ if (finalized && finalize_dispatch && finalize_dispatch->isDefined(defn)) {
+ CheckDispatchVisitor visitor(info);
+ visitor.TraverseStmt(defn->getBody());
+ if (!visitor.dispatched_to_receiver())
+ ReportMissingFinalizeDispatch(defn, info);
+ }
+ }
+
+ // TODO: Should we collect destructors similar to trace methods?
+ void CheckFinalization(RecordInfo* info) {
+ CXXDestructorDecl* dtor = info->record()->getDestructor();
+
+ // For finalized classes, check the finalization method if possible.
+ if (info->IsGCFinalized()) {
+ if (dtor && dtor->hasBody()) {
+ CheckFinalizerVisitor visitor(&cache_);
+ visitor.TraverseCXXMethodDecl(dtor);
+ if (!visitor.finalized_fields().empty()) {
+ ReportFinalizerAccessesFinalizedFields(
+ dtor, &visitor.finalized_fields());
+ }
+ }
+ return;
+ }
+
+ // Don't require finalization of a mixin that has not yet been "mixed in".
+ if (info->IsGCMixin())
+ return;
+
+ // Report the finalization error, and proceed to print possible causes for
+ // the finalization requirement.
+ ReportClassRequiresFinalization(info);
+
+ if (dtor && dtor->isUserProvided())
+ NoteUserDeclaredDestructor(dtor);
+
+ for (RecordInfo::Bases::iterator it = info->GetBases().begin();
+ it != info->GetBases().end();
+ ++it) {
+ if (it->second.info()->NeedsFinalization())
+ NoteBaseRequiresFinalization(&it->second);
+ }
+
+ for (RecordInfo::Fields::iterator it = info->GetFields().begin();
+ it != info->GetFields().end();
+ ++it) {
+ if (it->second.edge()->NeedsFinalization())
+ NoteField(&it->second, diag_field_requires_finalization_note_);
+ }
+ }
+
+ void CheckUnneededFinalization(RecordInfo* info) {
+ if (!HasNonEmptyFinalizer(info))
+ ReportClassDoesNotRequireFinalization(info);
+ }
+
+ bool HasNonEmptyFinalizer(RecordInfo* info) {
+ CXXDestructorDecl* dtor = info->record()->getDestructor();
+ if (dtor && dtor->isUserProvided()) {
+ if (!dtor->hasBody() || !EmptyStmtVisitor::isEmpty(dtor->getBody()))
+ return true;
+ }
+ for (RecordInfo::Bases::iterator it = info->GetBases().begin();
+ it != info->GetBases().end();
+ ++it) {
+ if (HasNonEmptyFinalizer(it->second.info()))
+ return true;
+ }
+ for (RecordInfo::Fields::iterator it = info->GetFields().begin();
+ it != info->GetFields().end();
+ ++it) {
+ if (it->second.edge()->NeedsFinalization())
+ return true;
+ }
+ return false;
+ }
+
+ // This is the main entry for tracing method definitions.
+ void CheckTracingMethod(CXXMethodDecl* method) {
+ RecordInfo* parent = cache_.Lookup(method->getParent());
+ if (IsIgnored(parent))
+ return;
+
+ // Check templated tracing methods by checking the template instantiations.
+ // Specialized templates are handled as ordinary classes.
+ if (ClassTemplateDecl* tmpl =
+ parent->record()->getDescribedClassTemplate()) {
+ for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
+ it != tmpl->spec_end();
+ ++it) {
+ // Check trace using each template instantiation as the holder.
+ if (IsTemplateInstantiation(*it))
+ CheckTraceOrDispatchMethod(cache_.Lookup(*it), method);
+ }
+ return;
+ }
+
+ CheckTraceOrDispatchMethod(parent, method);
+ }
+
+ // Determine what type of tracing method this is (dispatch or trace).
+ void CheckTraceOrDispatchMethod(RecordInfo* parent, CXXMethodDecl* method) {
+ bool isTraceAfterDispatch;
+ if (Config::IsTraceMethod(method, &isTraceAfterDispatch)) {
+ if (isTraceAfterDispatch || !parent->GetTraceDispatchMethod()) {
+ CheckTraceMethod(parent, method, isTraceAfterDispatch);
+ }
+ // Dispatch methods are checked when we identify subclasses.
+ }
+ }
+
+ // Check an actual trace method.
+ void CheckTraceMethod(RecordInfo* parent,
+ CXXMethodDecl* trace,
+ bool isTraceAfterDispatch) {
+ // A trace method must not override any non-virtual trace methods.
+ if (!isTraceAfterDispatch) {
+ for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
+ it != parent->GetBases().end();
+ ++it) {
+ RecordInfo* base = it->second.info();
+ if (CXXMethodDecl* other = base->InheritsNonVirtualTrace())
+ ReportOverriddenNonVirtualTrace(parent, trace, other);
+ }
+ }
+
+ CheckTraceVisitor visitor(trace, parent);
+ visitor.TraverseCXXMethodDecl(trace);
+
+ for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
+ it != parent->GetBases().end();
+ ++it) {
+ if (!it->second.IsProperlyTraced())
+ ReportBaseRequiresTracing(parent, trace, it->first);
+ }
+
+ for (RecordInfo::Fields::iterator it = parent->GetFields().begin();
+ it != parent->GetFields().end();
+ ++it) {
+ if (!it->second.IsProperlyTraced()) {
+ // Discontinue once an untraced-field error is found.
+ ReportFieldsRequireTracing(parent, trace);
+ break;
+ }
+ }
+ }
+
+ void DumpClass(RecordInfo* info) {
+ if (!json_)
+ return;
+
+ json_->OpenObject();
+ json_->Write("name", info->record()->getQualifiedNameAsString());
+ json_->Write("loc", GetLocString(info->record()->getLocStart()));
+ json_->CloseObject();
+
+ class DumpEdgeVisitor : public RecursiveEdgeVisitor {
+ public:
+ DumpEdgeVisitor(JsonWriter* json) : json_(json) {}
+ void DumpEdge(RecordInfo* src,
+ RecordInfo* dst,
+ const string& lbl,
+ const Edge::LivenessKind& kind,
+ const string& loc) {
+ json_->OpenObject();
+ json_->Write("src", src->record()->getQualifiedNameAsString());
+ json_->Write("dst", dst->record()->getQualifiedNameAsString());
+ json_->Write("lbl", lbl);
+ json_->Write("kind", kind);
+ json_->Write("loc", loc);
+ json_->Write("ptr",
+ !Parent() ? "val" :
+ Parent()->IsRawPtr() ? "raw" :
+ Parent()->IsRefPtr() ? "ref" :
+ Parent()->IsOwnPtr() ? "own" :
+ (Parent()->IsMember() ||
+ Parent()->IsWeakMember()) ? "mem" :
+ "val");
+ json_->CloseObject();
+ }
+
+ void DumpField(RecordInfo* src, FieldPoint* point, const string& loc) {
+ src_ = src;
+ point_ = point;
+ loc_ = loc;
+ point_->edge()->Accept(this);
+ }
+
+ void AtValue(Value* e) override {
+ // The liveness kind of a path from the point to this value
+ // is given by the innermost place that is non-strong.
+ Edge::LivenessKind kind = Edge::kStrong;
+ if (Config::IsIgnoreCycleAnnotated(point_->field())) {
+ kind = Edge::kWeak;
+ } else {
+ for (Context::iterator it = context().begin();
+ it != context().end();
+ ++it) {
+ Edge::LivenessKind pointer_kind = (*it)->Kind();
+ if (pointer_kind != Edge::kStrong) {
+ kind = pointer_kind;
+ break;
+ }
+ }
+ }
+ DumpEdge(
+ src_, e->value(), point_->field()->getNameAsString(), kind, loc_);
+ }
+
+ private:
+ JsonWriter* json_;
+ RecordInfo* src_;
+ FieldPoint* point_;
+ string loc_;
+ };
+
+ DumpEdgeVisitor visitor(json_);
+
+ RecordInfo::Bases& bases = info->GetBases();
+ for (RecordInfo::Bases::iterator it = bases.begin();
+ it != bases.end();
+ ++it) {
+ visitor.DumpEdge(info,
+ it->second.info(),
+ "<super>",
+ Edge::kStrong,
+ GetLocString(it->second.spec().getLocStart()));
+ }
+
+ RecordInfo::Fields& fields = info->GetFields();
+ for (RecordInfo::Fields::iterator it = fields.begin();
+ it != fields.end();
+ ++it) {
+ visitor.DumpField(info,
+ &it->second,
+ GetLocString(it->second.field()->getLocStart()));
+ }
+ }
+
+ // Returns a warning or error diagnostic level, based on the current handling of -Werror.
+ DiagnosticsEngine::Level getErrorLevel() {
+ return diagnostic_.getWarningsAsErrors() ? DiagnosticsEngine::Error
+ : DiagnosticsEngine::Warning;
+ }
+
+ const string GetLocString(SourceLocation loc) {
+ const SourceManager& source_manager = instance_.getSourceManager();
+ PresumedLoc ploc = source_manager.getPresumedLoc(loc);
+ if (ploc.isInvalid())
+ return "";
+ string loc_str;
+ llvm::raw_string_ostream OS(loc_str);
+ OS << ploc.getFilename()
+ << ":" << ploc.getLine()
+ << ":" << ploc.getColumn();
+ return OS.str();
+ }
+
+ bool IsIgnored(RecordInfo* record) {
+ return !record ||
+ !InCheckedNamespace(record) ||
+ IsIgnoredClass(record) ||
+ InIgnoredDirectory(record);
+ }
+
+ bool IsIgnoredClass(RecordInfo* info) {
+ // Ignore any class prefixed by SameSizeAs. These are used in
+ // Blink to verify class sizes and don't need checking.
+ const string SameSizeAs = "SameSizeAs";
+ if (info->name().compare(0, SameSizeAs.size(), SameSizeAs) == 0)
+ return true;
+ return options_.ignored_classes.find(info->name()) !=
+ options_.ignored_classes.end();
+ }
+
+ bool InIgnoredDirectory(RecordInfo* info) {
+ string filename;
+ if (!GetFilename(info->record()->getLocStart(), &filename))
+ return false; // TODO: should we ignore non-existing file locations?
+ std::vector<string>::iterator it = options_.ignored_directories.begin();
+ for (; it != options_.ignored_directories.end(); ++it)
+ if (filename.find(*it) != string::npos)
+ return true;
+ return false;
+ }
+
+ bool InCheckedNamespace(RecordInfo* info) {
+ if (!info)
+ return false;
+ for (DeclContext* context = info->record()->getDeclContext();
+ !context->isTranslationUnit();
+ context = context->getParent()) {
+ if (NamespaceDecl* decl = dyn_cast<NamespaceDecl>(context)) {
+ if (options_.checked_namespaces.find(decl->getNameAsString()) !=
+ options_.checked_namespaces.end()) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ bool GetFilename(SourceLocation loc, string* filename) {
+ const SourceManager& source_manager = instance_.getSourceManager();
+ SourceLocation spelling_location = source_manager.getSpellingLoc(loc);
+ PresumedLoc ploc = source_manager.getPresumedLoc(spelling_location);
+ if (ploc.isInvalid()) {
+ // If we're in an invalid location, we're looking at things that aren't
+ // actually stated in the source.
+ return false;
+ }
+ *filename = ploc.getFilename();
+ return true;
+ }
+
+ void ReportClassMustLeftMostlyDeriveGC(RecordInfo* info) {
+ SourceLocation loc = info->record()->getInnerLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_class_must_left_mostly_derive_gc_)
+ << info->record();
+ }
+
+ void ReportClassRequiresTraceMethod(RecordInfo* info) {
+ SourceLocation loc = info->record()->getInnerLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_class_requires_trace_method_)
+ << info->record();
+
+ for (RecordInfo::Bases::iterator it = info->GetBases().begin();
+ it != info->GetBases().end();
+ ++it) {
+ if (it->second.NeedsTracing().IsNeeded())
+ NoteBaseRequiresTracing(&it->second);
+ }
+
+ for (RecordInfo::Fields::iterator it = info->GetFields().begin();
+ it != info->GetFields().end();
+ ++it) {
+ if (!it->second.IsProperlyTraced())
+ NoteFieldRequiresTracing(info, it->first);
+ }
+ }
+
+ void ReportBaseRequiresTracing(RecordInfo* derived,
+ CXXMethodDecl* trace,
+ CXXRecordDecl* base) {
+ SourceLocation loc = trace->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_base_requires_tracing_)
+ << base << derived->record();
+ }
+
+ void ReportFieldsRequireTracing(RecordInfo* info, CXXMethodDecl* trace) {
+ SourceLocation loc = trace->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_fields_require_tracing_)
+ << info->record();
+ for (RecordInfo::Fields::iterator it = info->GetFields().begin();
+ it != info->GetFields().end();
+ ++it) {
+ if (!it->second.IsProperlyTraced())
+ NoteFieldRequiresTracing(info, it->first);
+ }
+ }
+
+ void ReportClassContainsInvalidFields(RecordInfo* info,
+ CheckFieldsVisitor::Errors* errors) {
+ SourceLocation loc = info->record()->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ bool only_warnings = options_.warn_raw_ptr;
+ for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
+ only_warnings && it != errors->end();
+ ++it) {
+ if (it->second != CheckFieldsVisitor::kRawPtrToGCManagedWarning)
+ only_warnings = false;
+ }
+ diagnostic_.Report(full_loc, only_warnings ?
+ diag_class_contains_invalid_fields_warning_ :
+ diag_class_contains_invalid_fields_)
+ << info->record();
+ for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
+ it != errors->end();
+ ++it) {
+ unsigned error;
+ if (it->second == CheckFieldsVisitor::kRawPtrToGCManaged ||
+ it->second == CheckFieldsVisitor::kRawPtrToGCManagedWarning) {
+ error = diag_raw_ptr_to_gc_managed_class_note_;
+ } else if (it->second == CheckFieldsVisitor::kRefPtrToGCManaged) {
+ error = diag_ref_ptr_to_gc_managed_class_note_;
+ } else if (it->second == CheckFieldsVisitor::kOwnPtrToGCManaged) {
+ error = diag_own_ptr_to_gc_managed_class_note_;
+ } else if (it->second == CheckFieldsVisitor::kMemberInUnmanaged) {
+ error = diag_member_in_unmanaged_class_note_;
+ } else if (it->second == CheckFieldsVisitor::kPtrFromHeapToStack) {
+ error = diag_stack_allocated_field_note_;
+ } else if (it->second == CheckFieldsVisitor::kGCDerivedPartObject) {
+ error = diag_part_object_to_gc_derived_class_note_;
+ } else {
+ assert(false && "Unknown field error");
+ }
+ NoteField(it->first, error);
+ }
+ }
+
+ void ReportClassContainsGCRoots(RecordInfo* info,
+ CheckGCRootsVisitor::Errors* errors) {
+ SourceLocation loc = info->record()->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ for (CheckGCRootsVisitor::Errors::iterator it = errors->begin();
+ it != errors->end();
+ ++it) {
+ CheckGCRootsVisitor::RootPath::iterator path = it->begin();
+ FieldPoint* point = *path;
+ diagnostic_.Report(full_loc, diag_class_contains_gc_root_)
+ << info->record() << point->field();
+ while (++path != it->end()) {
+ NotePartObjectContainsGCRoot(point);
+ point = *path;
+ }
+ NoteFieldContainsGCRoot(point);
+ }
+ }
+
+ void ReportFinalizerAccessesFinalizedFields(
+ CXXMethodDecl* dtor,
+ CheckFinalizerVisitor::Errors* fields) {
+ for (CheckFinalizerVisitor::Errors::iterator it = fields->begin();
+ it != fields->end();
+ ++it) {
+ SourceLocation loc = it->first->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_finalizer_accesses_finalized_field_)
+ << dtor << it->second->field();
+ NoteField(it->second, diag_finalized_field_note_);
+ }
+ }
+
+ void ReportClassRequiresFinalization(RecordInfo* info) {
+ SourceLocation loc = info->record()->getInnerLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_class_requires_finalization_)
+ << info->record();
+ }
+
+ void ReportClassDoesNotRequireFinalization(RecordInfo* info) {
+ SourceLocation loc = info->record()->getInnerLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_class_does_not_require_finalization_)
+ << info->record();
+ }
+
+ void ReportOverriddenNonVirtualTrace(RecordInfo* info,
+ CXXMethodDecl* trace,
+ CXXMethodDecl* overridden) {
+ SourceLocation loc = trace->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_)
+ << info->record() << overridden->getParent();
+ NoteOverriddenNonVirtualTrace(overridden);
+ }
+
+ void ReportMissingTraceDispatchMethod(RecordInfo* info) {
+ ReportMissingDispatchMethod(info, diag_missing_trace_dispatch_method_);
+ }
+
+ void ReportMissingFinalizeDispatchMethod(RecordInfo* info) {
+ ReportMissingDispatchMethod(info, diag_missing_finalize_dispatch_method_);
+ }
+
+ void ReportMissingDispatchMethod(RecordInfo* info, unsigned error) {
+ SourceLocation loc = info->record()->getInnerLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, error) << info->record();
+ }
+
+ void ReportVirtualAndManualDispatch(RecordInfo* info,
+ CXXMethodDecl* dispatch) {
+ SourceLocation loc = info->record()->getInnerLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_virtual_and_manual_dispatch_)
+ << info->record();
+ NoteManualDispatchMethod(dispatch);
+ }
+
+ void ReportMissingTraceDispatch(const FunctionDecl* dispatch,
+ RecordInfo* receiver) {
+ ReportMissingDispatch(dispatch, receiver, diag_missing_trace_dispatch_);
+ }
+
+ void ReportMissingFinalizeDispatch(const FunctionDecl* dispatch,
+ RecordInfo* receiver) {
+ ReportMissingDispatch(dispatch, receiver, diag_missing_finalize_dispatch_);
+ }
+
+ void ReportMissingDispatch(const FunctionDecl* dispatch,
+ RecordInfo* receiver,
+ unsigned error) {
+ SourceLocation loc = dispatch->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, error) << receiver->record();
+ }
+
+ void ReportDerivesNonStackAllocated(RecordInfo* info, BasePoint* base) {
+ SourceLocation loc = base->spec().getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_derives_non_stack_allocated_)
+ << info->record() << base->info()->record();
+ }
+
+ void ReportClassOverridesNew(RecordInfo* info, CXXMethodDecl* newop) {
+ SourceLocation loc = newop->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_class_overrides_new_) << info->record();
+ }
+
+ void ReportClassDeclaresPureVirtualTrace(RecordInfo* info,
+ CXXMethodDecl* trace) {
+ SourceLocation loc = trace->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_class_declares_pure_virtual_trace_)
+ << info->record();
+ }
+
+ void ReportLeftMostBaseMustBePolymorphic(RecordInfo* derived,
+ CXXRecordDecl* base) {
+ SourceLocation loc = base->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_left_most_base_must_be_polymorphic_)
+ << base << derived->record();
+ }
+
+ void ReportBaseClassMustDeclareVirtualTrace(RecordInfo* derived,
+ CXXRecordDecl* base) {
+ SourceLocation loc = base->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_base_class_must_declare_virtual_trace_)
+ << base << derived->record();
+ }
+
+ void NoteManualDispatchMethod(CXXMethodDecl* dispatch) {
+ SourceLocation loc = dispatch->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_manual_dispatch_method_note_) << dispatch;
+ }
+
+ void NoteBaseRequiresTracing(BasePoint* base) {
+ SourceLocation loc = base->spec().getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_base_requires_tracing_note_)
+ << base->info()->record();
+ }
+
+ void NoteFieldRequiresTracing(RecordInfo* holder, FieldDecl* field) {
+ NoteField(field, diag_field_requires_tracing_note_);
+ }
+
+ void NotePartObjectContainsGCRoot(FieldPoint* point) {
+ FieldDecl* field = point->field();
+ SourceLocation loc = field->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_part_object_contains_gc_root_note_)
+ << field << field->getParent();
+ }
+
+ void NoteFieldContainsGCRoot(FieldPoint* point) {
+ NoteField(point, diag_field_contains_gc_root_note_);
+ }
+
+ void NoteUserDeclaredDestructor(CXXMethodDecl* dtor) {
+ SourceLocation loc = dtor->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_user_declared_destructor_note_);
+ }
+
+ void NoteUserDeclaredFinalizer(CXXMethodDecl* dtor) {
+ SourceLocation loc = dtor->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_user_declared_finalizer_note_);
+ }
+
+ void NoteBaseRequiresFinalization(BasePoint* base) {
+ SourceLocation loc = base->spec().getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_base_requires_finalization_note_)
+ << base->info()->record();
+ }
+
+ void NoteField(FieldPoint* point, unsigned note) {
+ NoteField(point->field(), note);
+ }
+
+ void NoteField(FieldDecl* field, unsigned note) {
+ SourceLocation loc = field->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, note) << field;
+ }
+
+ void NoteOverriddenNonVirtualTrace(CXXMethodDecl* overridden) {
+ SourceLocation loc = overridden->getLocStart();
+ SourceManager& manager = instance_.getSourceManager();
+ FullSourceLoc full_loc(loc, manager);
+ diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_note_)
+ << overridden;
+ }
+
+ unsigned diag_class_must_left_mostly_derive_gc_;
+ unsigned diag_class_requires_trace_method_;
+ unsigned diag_base_requires_tracing_;
+ unsigned diag_fields_require_tracing_;
+ unsigned diag_class_contains_invalid_fields_;
+ unsigned diag_class_contains_invalid_fields_warning_;
+ unsigned diag_class_contains_gc_root_;
+ unsigned diag_class_requires_finalization_;
+ unsigned diag_class_does_not_require_finalization_;
+ unsigned diag_finalizer_accesses_finalized_field_;
+ unsigned diag_overridden_non_virtual_trace_;
+ unsigned diag_missing_trace_dispatch_method_;
+ unsigned diag_missing_finalize_dispatch_method_;
+ unsigned diag_virtual_and_manual_dispatch_;
+ unsigned diag_missing_trace_dispatch_;
+ unsigned diag_missing_finalize_dispatch_;
+ unsigned diag_derives_non_stack_allocated_;
+ unsigned diag_class_overrides_new_;
+ unsigned diag_class_declares_pure_virtual_trace_;
+ unsigned diag_left_most_base_must_be_polymorphic_;
+ unsigned diag_base_class_must_declare_virtual_trace_;
+
+ unsigned diag_base_requires_tracing_note_;
+ unsigned diag_field_requires_tracing_note_;
+ unsigned diag_raw_ptr_to_gc_managed_class_note_;
+ unsigned diag_ref_ptr_to_gc_managed_class_note_;
+ unsigned diag_own_ptr_to_gc_managed_class_note_;
+ unsigned diag_stack_allocated_field_note_;
+ unsigned diag_member_in_unmanaged_class_note_;
+ unsigned diag_part_object_to_gc_derived_class_note_;
+ unsigned diag_part_object_contains_gc_root_note_;
+ unsigned diag_field_contains_gc_root_note_;
+ unsigned diag_finalized_field_note_;
+ unsigned diag_user_declared_destructor_note_;
+ unsigned diag_user_declared_finalizer_note_;
+ unsigned diag_base_requires_finalization_note_;
+ unsigned diag_field_requires_finalization_note_;
+ unsigned diag_overridden_non_virtual_trace_note_;
+ unsigned diag_manual_dispatch_method_note_;
+
+ CompilerInstance& instance_;
+ DiagnosticsEngine& diagnostic_;
+ BlinkGCPluginOptions options_;
+ RecordCache cache_;
+ JsonWriter* json_;
+};
+
+class BlinkGCPluginAction : public PluginASTAction {
+ public:
+ BlinkGCPluginAction() {}
+
+ protected:
+ // Overridden from PluginASTAction:
+ virtual std::unique_ptr<ASTConsumer> CreateASTConsumer(
+ CompilerInstance& instance,
+ llvm::StringRef ref) {
+ return llvm::make_unique<BlinkGCPluginConsumer>(instance, options_);
+ }
+
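+ // Recognized plugin arguments; parsing fails on the first unknown argument.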
+ virtual bool ParseArgs(const CompilerInstance& instance,
+ const std::vector<string>& args) {
+ bool parsed = true;
+
+ for (size_t i = 0; i < args.size() && parsed; ++i) {
+ if (args[i] == "enable-oilpan") {
+ options_.enable_oilpan = true;
+ } else if (args[i] == "dump-graph") {
+ options_.dump_graph = true;
+ } else if (args[i] == "warn-raw-ptr") {
+ options_.warn_raw_ptr = true;
+ } else if (args[i] == "warn-unneeded-finalizer") {
+ options_.warn_unneeded_finalizer = true;
+ } else {
+ parsed = false;
+ llvm::errs() << "Unknown blink-gc-plugin argument: " << args[i] << "\n";
+ }
+ }
+
+ return parsed;
+ }
+
+ private:
+ BlinkGCPluginOptions options_;
+};
+
+} // namespace
+
+static FrontendPluginRegistry::Add<BlinkGCPluginAction> X(
+ "blink-gc-plugin",
+ "Check Blink GC invariants");
diff --git a/tools/clang/blink_gc_plugin/CMakeLists.txt b/tools/clang/blink_gc_plugin/CMakeLists.txt
new file mode 100644
index 0000000..9dab269
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/CMakeLists.txt
@@ -0,0 +1,17 @@
+# This line is read by update.sh and other scripts in tools/clang/scripts
+# Note: The spaces are significant.
+set(LIBRARYNAME BlinkGCPlugin_12)
+
+add_llvm_loadable_module("lib${LIBRARYNAME}"
+ BlinkGCPlugin.cpp
+ Edge.cpp
+ RecordInfo.cpp
+ )
+
+install(TARGETS "lib${LIBRARYNAME}" LIBRARY DESTINATION lib)
+
+cr_add_test(blink_gc_plugin_test
+ ${CMAKE_CURRENT_SOURCE_DIR}/tests/test.sh
+ ${LLVM_BUILD_DIR}/bin/clang
+ $<TARGET_FILE:lib${LIBRARYNAME}>
+ )
diff --git a/tools/clang/blink_gc_plugin/Config.h b/tools/clang/blink_gc_plugin/Config.h
new file mode 100644
index 0000000..ed76f1a
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/Config.h
@@ -0,0 +1,203 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file defines the names used by GC infrastructure.
+
+// TODO: Restructure the name determination to use fully qualified names (e.g.,
+// blink::Foo) so that the plugin can be enabled for all of chromium. Doing so
+// would allow us to catch errors with structures outside of blink that might
+// have unsafe pointers to GC allocated blink structures.
+
+#ifndef TOOLS_BLINK_GC_PLUGIN_CONFIG_H_
+#define TOOLS_BLINK_GC_PLUGIN_CONFIG_H_
+
+#include "clang/AST/AST.h"
+#include "clang/AST/Attr.h"
+
+const char kNewOperatorName[] = "operator new";
+const char kCreateName[] = "create";
+const char kTraceName[] = "trace";
+const char kFinalizeName[] = "finalizeGarbageCollectedObject";
+const char kTraceAfterDispatchName[] = "traceAfterDispatch";
+const char kRegisterWeakMembersName[] = "registerWeakMembers";
+const char kHeapAllocatorName[] = "HeapAllocator";
+const char kTraceIfNeededName[] = "TraceIfNeeded";
+
+class Config {
+ public:
+ static bool IsMember(const std::string& name) {
+ return name == "Member";
+ }
+
+ static bool IsWeakMember(const std::string& name) {
+ return name == "WeakMember";
+ }
+
+ static bool IsMemberHandle(const std::string& name) {
+ return IsMember(name) ||
+ IsWeakMember(name);
+ }
+
+ static bool IsPersistent(const std::string& name) {
+ return name == "Persistent";
+ }
+
+ static bool IsPersistentHandle(const std::string& name) {
+ return IsPersistent(name) ||
+ IsPersistentGCCollection(name);
+ }
+
+ static bool IsRawPtr(const std::string& name) {
+ return name == "RawPtr";
+ }
+
+ static bool IsRefPtr(const std::string& name) {
+ return name == "RefPtr";
+ }
+
+ static bool IsOwnPtr(const std::string& name) {
+ return name == "OwnPtr";
+ }
+
+ static bool IsWTFCollection(const std::string& name) {
+ return name == "Vector" ||
+ name == "Deque" ||
+ name == "HashSet" ||
+ name == "ListHashSet" ||
+ name == "LinkedHashSet" ||
+ name == "HashCountedSet" ||
+ name == "HashMap";
+ }
+
+ static bool IsGCCollection(const std::string& name) {
+ return name == "HeapVector" ||
+ name == "HeapDeque" ||
+ name == "HeapHashSet" ||
+ name == "HeapListHashSet" ||
+ name == "HeapLinkedHashSet" ||
+ name == "HeapHashCountedSet" ||
+ name == "HeapHashMap" ||
+ IsPersistentGCCollection(name);
+ }
+
+ static bool IsPersistentGCCollection(const std::string& name) {
+ return name == "PersistentHeapVector" ||
+ name == "PersistentHeapDeque" ||
+ name == "PersistentHeapHashSet" ||
+ name == "PersistentHeapListHashSet" ||
+ name == "PersistentHeapLinkedHashSet" ||
+ name == "PersistentHeapHashCountedSet" ||
+ name == "PersistentHeapHashMap";
+ }
+
+ static bool IsHashMap(const std::string& name) {
+ return name == "HashMap" ||
+ name == "HeapHashMap" ||
+ name == "PersistentHeapHashMap";
+ }
+
+ // Following http://crrev.com/369633033 (Blink r177436),
+ // ignore blink::ScriptWrappable's destructor.
+ // TODO: remove when its non-Oilpan destructor is removed.
+ static bool HasIgnorableDestructor(const std::string& ns,
+ const std::string& name) {
+ return ns == "blink" && name == "ScriptWrappable";
+ }
+
+ // Assumes name is a valid collection name.
+ static size_t CollectionDimension(const std::string& name) {
+ return (IsHashMap(name) || name == "pair") ? 2 : 1;
+ }
+
+ static bool IsDummyBase(const std::string& name) {
+ return name == "DummyBase";
+ }
+
+ static bool IsRefCountedBase(const std::string& name) {
+ return name == "RefCounted" ||
+ name == "ThreadSafeRefCounted";
+ }
+
+ static bool IsGCMixinBase(const std::string& name) {
+ return name == "GarbageCollectedMixin";
+ }
+
+ static bool IsGCFinalizedBase(const std::string& name) {
+ return name == "GarbageCollectedFinalized" ||
+ name == "RefCountedGarbageCollected" ||
+ name == "ThreadSafeRefCountedGarbageCollected";
+ }
+
+ static bool IsGCBase(const std::string& name) {
+ return name == "GarbageCollected" ||
+ IsGCFinalizedBase(name) ||
+ IsGCMixinBase(name);
+ }
+
+ // Returns true for base classes that do not need a vtable entry for trace
+ // because they cannot possibly initiate a GC during construction.
+ static bool IsSafePolymorphicBase(const std::string& name) {
+ return IsGCBase(name) || IsDummyBase(name) || IsRefCountedBase(name);
+ }
+
+ static bool IsAnnotated(clang::Decl* decl, const std::string& anno) {
+ clang::AnnotateAttr* attr = decl->getAttr<clang::AnnotateAttr>();
+ return attr && (attr->getAnnotation() == anno);
+ }
+
+ static bool IsStackAnnotated(clang::Decl* decl) {
+ return IsAnnotated(decl, "blink_stack_allocated");
+ }
+
+ static bool IsIgnoreAnnotated(clang::Decl* decl) {
+ return IsAnnotated(decl, "blink_gc_plugin_ignore");
+ }
+
+ static bool IsIgnoreCycleAnnotated(clang::Decl* decl) {
+ return IsAnnotated(decl, "blink_gc_plugin_ignore_cycle") ||
+ IsIgnoreAnnotated(decl);
+ }
+
+ static bool IsVisitor(const std::string& name) { return name == "Visitor"; }
+
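+ // A method is recognized as a trace method if it is named trace (or
+ // traceAfterDispatch) and takes a single Visitor* parameter.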
+ static bool IsTraceMethod(clang::FunctionDecl* method,
+ bool* isTraceAfterDispatch = 0) {
+ if (method->getNumParams() != 1)
+ return false;
+
+ const std::string& name = method->getNameAsString();
+ if (name != kTraceName && name != kTraceAfterDispatchName)
+ return false;
+
+ const clang::QualType& formal_type = method->getParamDecl(0)->getType();
+ if (!formal_type->isPointerType())
+ return false;
+
+ clang::CXXRecordDecl* pointee_type =
+ formal_type->getPointeeType()->getAsCXXRecordDecl();
+ if (!pointee_type)
+ return false;
+
+ if (!IsVisitor(pointee_type->getName()))
+ return false;
+
+ if (isTraceAfterDispatch)
+ *isTraceAfterDispatch = (name == kTraceAfterDispatchName);
+ return true;
+ }
+
+ static bool StartsWith(const std::string& str, const std::string& prefix) {
+ if (prefix.size() > str.size())
+ return false;
+ return str.compare(0, prefix.size(), prefix) == 0;
+ }
+
+ static bool EndsWith(const std::string& str, const std::string& suffix) {
+ if (suffix.size() > str.size())
+ return false;
+ return str.compare(str.size() - suffix.size(), suffix.size(), suffix) == 0;
+ }
+};
+
+#endif // TOOLS_BLINK_GC_PLUGIN_CONFIG_H_
diff --git a/tools/clang/blink_gc_plugin/Edge.cpp b/tools/clang/blink_gc_plugin/Edge.cpp
new file mode 100644
index 0000000..c56a576
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/Edge.cpp
@@ -0,0 +1,75 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "Config.h"
+#include "Edge.h"
+#include "RecordInfo.h"
+
+TracingStatus Value::NeedsTracing(NeedsTracingOption option) {
+ return value_->NeedsTracing(option);
+}
+
+bool Value::NeedsFinalization() { return value_->NeedsFinalization(); }
+bool Collection::NeedsFinalization() { return info_->NeedsFinalization(); }
+
+void RecursiveEdgeVisitor::AtValue(Value*) {}
+void RecursiveEdgeVisitor::AtRawPtr(RawPtr*) {}
+void RecursiveEdgeVisitor::AtRefPtr(RefPtr*) {}
+void RecursiveEdgeVisitor::AtOwnPtr(OwnPtr*) {}
+void RecursiveEdgeVisitor::AtMember(Member*) {}
+void RecursiveEdgeVisitor::AtWeakMember(WeakMember*) {}
+void RecursiveEdgeVisitor::AtPersistent(Persistent*) {}
+void RecursiveEdgeVisitor::AtCollection(Collection*) {}
+
+void RecursiveEdgeVisitor::VisitValue(Value* e) {
+ AtValue(e);
+}
+
+void RecursiveEdgeVisitor::VisitRawPtr(RawPtr* e) {
+ AtRawPtr(e);
+ Enter(e);
+ e->ptr()->Accept(this);
+ Leave();
+}
+
+void RecursiveEdgeVisitor::VisitRefPtr(RefPtr* e) {
+ AtRefPtr(e);
+ Enter(e);
+ e->ptr()->Accept(this);
+ Leave();
+}
+void RecursiveEdgeVisitor::VisitOwnPtr(OwnPtr* e) {
+ AtOwnPtr(e);
+ Enter(e);
+ e->ptr()->Accept(this);
+ Leave();
+}
+
+void RecursiveEdgeVisitor::VisitMember(Member* e) {
+ AtMember(e);
+ Enter(e);
+ e->ptr()->Accept(this);
+ Leave();
+}
+
+void RecursiveEdgeVisitor::VisitWeakMember(WeakMember* e) {
+ AtWeakMember(e);
+ Enter(e);
+ e->ptr()->Accept(this);
+ Leave();
+}
+
+void RecursiveEdgeVisitor::VisitPersistent(Persistent* e) {
+ AtPersistent(e);
+ Enter(e);
+ e->ptr()->Accept(this);
+ Leave();
+}
+
+void RecursiveEdgeVisitor::VisitCollection(Collection* e) {
+ AtCollection(e);
+ Enter(e);
+ e->AcceptMembers(this);
+ Leave();
+}
diff --git a/tools/clang/blink_gc_plugin/Edge.h b/tools/clang/blink_gc_plugin/Edge.h
new file mode 100644
index 0000000..d0b78b5
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/Edge.h
@@ -0,0 +1,245 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BLINK_GC_PLUGIN_EDGE_H_
+#define TOOLS_BLINK_GC_PLUGIN_EDGE_H_
+
+#include <deque>
+
+#include "TracingStatus.h"
+
+class RecordInfo;
+
+class Edge;
+class Value;
+class RawPtr;
+class RefPtr;
+class OwnPtr;
+class Member;
+class WeakMember;
+class Persistent;
+class Collection;
+
+// Bare-bones visitor.
+class EdgeVisitor {
+ public:
+ virtual void VisitValue(Value*) {}
+ virtual void VisitRawPtr(RawPtr*) {}
+ virtual void VisitRefPtr(RefPtr*) {}
+ virtual void VisitOwnPtr(OwnPtr*) {}
+ virtual void VisitMember(Member*) {}
+ virtual void VisitWeakMember(WeakMember*) {}
+ virtual void VisitPersistent(Persistent*) {}
+ virtual void VisitCollection(Collection*) {}
+};
+
+// Recursive edge visitor. The traversed path is accessible in context.
+class RecursiveEdgeVisitor : public EdgeVisitor {
+ public:
+ // Overrides that recursively walk the edges and record the path.
+ virtual void VisitValue(Value*) override;
+ virtual void VisitRawPtr(RawPtr*) override;
+ virtual void VisitRefPtr(RefPtr*) override;
+ virtual void VisitOwnPtr(OwnPtr*) override;
+ virtual void VisitMember(Member*) override;
+ virtual void VisitWeakMember(WeakMember*) override;
+ virtual void VisitPersistent(Persistent*) override;
+ virtual void VisitCollection(Collection*) override;
+
+ protected:
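+ // The traversal context records the chain of edges entered so far;
+ // Parent() is the innermost enclosing edge (the most recently entered one).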
+ typedef std::deque<Edge*> Context;
+ Context& context() { return context_; }
+ Edge* Parent() { return context_.empty() ? 0 : context_.front(); }
+ void Enter(Edge* e) { return context_.push_front(e); }
+ void Leave() { context_.pop_front(); }
+
+ // Default callbacks to override in visitor subclasses.
+ virtual void AtValue(Value*);
+ virtual void AtRawPtr(RawPtr*);
+ virtual void AtRefPtr(RefPtr*);
+ virtual void AtOwnPtr(OwnPtr*);
+ virtual void AtMember(Member*);
+ virtual void AtWeakMember(WeakMember*);
+ virtual void AtPersistent(Persistent*);
+ virtual void AtCollection(Collection*);
+
+ private:
+ Context context_;
+};
+
+// Base class for all edges.
+class Edge {
+ public:
+ enum NeedsTracingOption { kRecursive, kNonRecursive };
+ enum LivenessKind { kWeak, kStrong, kRoot };
+
+ virtual ~Edge() {}
+ virtual LivenessKind Kind() = 0;
+ virtual void Accept(EdgeVisitor*) = 0;
+ virtual bool NeedsFinalization() = 0;
+ virtual TracingStatus NeedsTracing(NeedsTracingOption) {
+ return TracingStatus::Unknown();
+ }
+
+ virtual bool IsValue() { return false; }
+ virtual bool IsRawPtr() { return false; }
+ virtual bool IsRawPtrClass() { return false; }
+ virtual bool IsRefPtr() { return false; }
+ virtual bool IsOwnPtr() { return false; }
+ virtual bool IsMember() { return false; }
+ virtual bool IsWeakMember() { return false; }
+ virtual bool IsPersistent() { return false; }
+ virtual bool IsCollection() { return false; }
+};
+
+// A value edge is a direct edge to some type, e.g., a part-object edge.
+class Value : public Edge {
+ public:
+ explicit Value(RecordInfo* value) : value_(value) {}
+ bool IsValue() override { return true; }
+ LivenessKind Kind() override { return kStrong; }
+ bool NeedsFinalization() override;
+ TracingStatus NeedsTracing(NeedsTracingOption) override;
+ void Accept(EdgeVisitor* visitor) override { visitor->VisitValue(this); }
+ RecordInfo* value() { return value_; }
+
+ private:
+ RecordInfo* value_;
+};
+
+// Shared base for smart-pointer edges.
+class PtrEdge : public Edge {
+ public:
+ ~PtrEdge() { delete ptr_; }
+ Edge* ptr() { return ptr_; }
+ protected:
+ PtrEdge(Edge* ptr) : ptr_(ptr) {
+ assert(ptr && "PtrEdge pointer must be non-null");
+ }
+ private:
+ Edge* ptr_;
+};
+
+class RawPtr : public PtrEdge {
+ public:
+ explicit RawPtr(Edge* ptr, bool is_ptr_class)
+ : PtrEdge(ptr), is_ptr_class_(is_ptr_class) { }
+ bool IsRawPtr() { return true; }
+ bool IsRawPtrClass() { return is_ptr_class_; }
+ LivenessKind Kind() { return kWeak; }
+ bool NeedsFinalization() { return false; }
+ TracingStatus NeedsTracing(NeedsTracingOption) {
+ return TracingStatus::Unneeded();
+ }
+ void Accept(EdgeVisitor* visitor) { visitor->VisitRawPtr(this); }
+ private:
+ bool is_ptr_class_;
+};
+
+class RefPtr : public PtrEdge {
+ public:
+ explicit RefPtr(Edge* ptr) : PtrEdge(ptr) { }
+ bool IsRefPtr() { return true; }
+ LivenessKind Kind() { return kStrong; }
+ bool NeedsFinalization() { return true; }
+ TracingStatus NeedsTracing(NeedsTracingOption) {
+ return TracingStatus::Unneeded();
+ }
+ void Accept(EdgeVisitor* visitor) { visitor->VisitRefPtr(this); }
+};
+
+class OwnPtr : public PtrEdge {
+ public:
+ explicit OwnPtr(Edge* ptr) : PtrEdge(ptr) { }
+ bool IsOwnPtr() { return true; }
+ LivenessKind Kind() { return kStrong; }
+ bool NeedsFinalization() { return true; }
+ TracingStatus NeedsTracing(NeedsTracingOption) {
+ return TracingStatus::Unneeded();
+ }
+ void Accept(EdgeVisitor* visitor) { visitor->VisitOwnPtr(this); }
+};
+
+class Member : public PtrEdge {
+ public:
+ explicit Member(Edge* ptr) : PtrEdge(ptr) { }
+ bool IsMember() { return true; }
+ LivenessKind Kind() { return kStrong; }
+ bool NeedsFinalization() { return false; }
+ TracingStatus NeedsTracing(NeedsTracingOption) {
+ return TracingStatus::Needed();
+ }
+ void Accept(EdgeVisitor* visitor) { visitor->VisitMember(this); }
+};
+
+class WeakMember : public PtrEdge {
+ public:
+ explicit WeakMember(Edge* ptr) : PtrEdge(ptr) { }
+ bool IsWeakMember() { return true; }
+ LivenessKind Kind() { return kWeak; }
+ bool NeedsFinalization() { return false; }
+ TracingStatus NeedsTracing(NeedsTracingOption) {
+ return TracingStatus::Needed();
+ }
+ void Accept(EdgeVisitor* visitor) { visitor->VisitWeakMember(this); }
+};
+
+class Persistent : public PtrEdge {
+ public:
+ explicit Persistent(Edge* ptr) : PtrEdge(ptr) { }
+ bool IsPersistent() { return true; }
+ LivenessKind Kind() { return kRoot; }
+ bool NeedsFinalization() { return true; }
+ TracingStatus NeedsTracing(NeedsTracingOption) {
+ return TracingStatus::Unneeded();
+ }
+ void Accept(EdgeVisitor* visitor) { visitor->VisitPersistent(this); }
+};
+
+class Collection : public Edge {
+ public:
+ typedef std::vector<Edge*> Members;
+ Collection(RecordInfo* info, bool on_heap, bool is_root)
+ : info_(info),
+ on_heap_(on_heap),
+ is_root_(is_root) {}
+ ~Collection() {
+ for (Members::iterator it = members_.begin(); it != members_.end(); ++it) {
+ assert(*it && "Collection-edge members must be non-null");
+ delete *it;
+ }
+ }
+ bool IsCollection() { return true; }
+ LivenessKind Kind() { return is_root_ ? kRoot : kStrong; }
+ bool on_heap() { return on_heap_; }
+ bool is_root() { return is_root_; }
+ Members& members() { return members_; }
+ void Accept(EdgeVisitor* visitor) { visitor->VisitCollection(this); }
+ void AcceptMembers(EdgeVisitor* visitor) {
+ for (Members::iterator it = members_.begin(); it != members_.end(); ++it)
+ (*it)->Accept(visitor);
+ }
+ bool NeedsFinalization();
+ TracingStatus NeedsTracing(NeedsTracingOption) {
+ if (is_root_)
+ return TracingStatus::Unneeded();
+ if (on_heap_)
+ return TracingStatus::Needed();
+ // For off-heap collections, determine tracing status of members.
+ TracingStatus status = TracingStatus::Unneeded();
+ for (Members::iterator it = members_.begin(); it != members_.end(); ++it) {
+ // Do a non-recursive test here since members could equal the holder.
+ status = status.LUB((*it)->NeedsTracing(kNonRecursive));
+ }
+ return status;
+ }
+
+ private:
+ RecordInfo* info_;
+ Members members_;
+ bool on_heap_;
+ bool is_root_;
+};
+
+#endif // TOOLS_BLINK_GC_PLUGIN_EDGE_H_
diff --git a/tools/clang/blink_gc_plugin/JsonWriter.h b/tools/clang/blink_gc_plugin/JsonWriter.h
new file mode 100644
index 0000000..54a87aa
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/JsonWriter.h
@@ -0,0 +1,73 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BLINK_GC_PLUGIN_JSON_WRITER_H_
+#define TOOLS_BLINK_GC_PLUGIN_JSON_WRITER_H_
+
+#include "llvm/Support/raw_ostream.h"
+
+// Helper to write information for the points-to graph.
+class JsonWriter {
+ public:
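+ // Returns a writer for the given stream, or null if no stream is provided
+ // (callers treat a null writer as output disabled).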
+ static JsonWriter* from(llvm::raw_fd_ostream* os) {
+ return os ? new JsonWriter(os) : 0;
+ }
+ ~JsonWriter() {
+ os_.close();
+ }
+ void OpenList() {
+ Separator();
+ os_ << "[";
+ state_.push(false);
+ }
+ void OpenList(const std::string key) {
+ Write(key);
+ os_ << ":";
+ OpenList();
+ }
+ void CloseList() {
+ os_ << "]";
+ state_.pop();
+ }
+ void OpenObject() {
+ Separator();
+ os_ << "{";
+ state_.push(false);
+ }
+ void CloseObject() {
+ os_ << "}\n";
+ state_.pop();
+ }
+ void Write(const size_t val) {
+ Separator();
+ os_ << val;
+ }
+ void Write(const std::string val) {
+ Separator();
+ os_ << "\"" << val << "\"";
+ }
+ void Write(const std::string key, const size_t val) {
+ Separator();
+ os_ << "\"" << key << "\":" << val;
+ }
+ void Write(const std::string key, const std::string val) {
+ Separator();
+ os_ << "\"" << key << "\":\"" << val << "\"";
+ }
+ private:
+ JsonWriter(llvm::raw_fd_ostream* os) : os_(*os) {}
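+ // Emit a separating comma if something was already written at the current
+ // nesting level; otherwise just mark the level as non-empty.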
+ void Separator() {
+ if (state_.empty())
+ return;
+ if (state_.top()) {
+ os_ << ",";
+ return;
+ }
+ state_.top() = true;
+ }
+ llvm::raw_fd_ostream& os_;
+ std::stack<bool> state_;
+};
+
+#endif // TOOLS_BLINK_GC_PLUGIN_JSON_WRITER_H_
diff --git a/tools/clang/blink_gc_plugin/NeedsTracing.h b/tools/clang/blink_gc_plugin/NeedsTracing.h
new file mode 100644
index 0000000..cf4c2c1
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/NeedsTracing.h
@@ -0,0 +1,31 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// NeedsTracing is a three-point value ordered by unneeded < unknown < needed.
+// Unneeded means that the point definitively does not need to be traced.
+
+#ifndef TOOLS_BLINK_GC_PLUGIN_NEEDS_TRACING_H_
+#define TOOLS_BLINK_GC_PLUGIN_NEEDS_TRACING_H_
+
+class NeedsTracing {
+ public:
+ static NeedsTracing Unneeded() { return kUnneeded; }
+ static NeedsTracing Unknown() { return kUnknown; }
+ static NeedsTracing Needed() { return kNeeded; }
+ bool IsUnneeded() { return value_ == kUnneeded; }
+ bool IsUnknown() { return value_ == kUnknown; }
+ bool IsNeeded() { return value_ == kNeeded; }
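+ // Least upper bound in the order unneeded < unknown < needed.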
+ NeedsTracing LUB(const NeedsTracing& other) {
+ return value_ > other.value_ ? value_ : other.value_;
+ }
+ bool operator==(const NeedsTracing& other) {
+ return value_ == other.value_;
+ }
+ private:
+ enum Value { kUnneeded, kUnknown, kNeeded };
+ NeedsTracing(Value value) : value_(value) {}
+ Value value_;
+};
+
+#endif // TOOLS_BLINK_GC_PLUGIN_NEEDS_TRACING_H_
diff --git a/tools/clang/blink_gc_plugin/OWNERS b/tools/clang/blink_gc_plugin/OWNERS
new file mode 100644
index 0000000..4fce218
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/OWNERS
@@ -0,0 +1,4 @@
+ager@chromium.org
+haraken@chromium.org
+tkent@chromium.org
+zerny@chromium.org
diff --git a/tools/clang/blink_gc_plugin/README.chromium b/tools/clang/blink_gc_plugin/README.chromium
new file mode 100644
index 0000000..294833c
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/README.chromium
@@ -0,0 +1,2 @@
+This clang plugin checks various invariants of the Blink garbage
+collection infrastructure.
diff --git a/tools/clang/blink_gc_plugin/RecordInfo.cpp b/tools/clang/blink_gc_plugin/RecordInfo.cpp
new file mode 100644
index 0000000..b491e47
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/RecordInfo.cpp
@@ -0,0 +1,555 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "Config.h"
+#include "RecordInfo.h"
+
+using namespace clang;
+using std::string;
+
+RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
+ : cache_(cache),
+ record_(record),
+ name_(record->getName()),
+ fields_need_tracing_(TracingStatus::Unknown()),
+ bases_(0),
+ fields_(0),
+ is_stack_allocated_(kNotComputed),
+ is_non_newable_(kNotComputed),
+ is_only_placement_newable_(kNotComputed),
+ does_need_finalization_(kNotComputed),
+ determined_trace_methods_(false),
+ trace_method_(0),
+ trace_dispatch_method_(0),
+ finalize_dispatch_method_(0),
+ is_gc_derived_(false),
+ base_paths_(0) {}
+
+RecordInfo::~RecordInfo() {
+ delete fields_;
+ delete bases_;
+ delete base_paths_;
+}
+
+// Get the first |count| template arguments. Returns false if there
+// are fewer than |count| arguments or any of the arguments are not
+// of a valid Type structure. If |count| is zero, all arguments are
+// collected.
+bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
+ ClassTemplateSpecializationDecl* tmpl =
+ dyn_cast<ClassTemplateSpecializationDecl>(record_);
+ if (!tmpl)
+ return false;
+ const TemplateArgumentList& args = tmpl->getTemplateArgs();
+ if (args.size() < count)
+ return false;
+ if (count <= 0)
+ count = args.size();
+ for (unsigned i = 0; i < count; ++i) {
+ TemplateArgument arg = args[i];
+ if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
+ output_args->push_back(arg.getAsType().getTypePtr());
+ } else {
+ return false;
+ }
+ }
+ return true;
+}
+
+// Test if a record is a HeapAllocated collection.
+bool RecordInfo::IsHeapAllocatedCollection() {
+ if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
+ return false;
+
+ TemplateArgs args;
+ if (GetTemplateArgs(0, &args)) {
+ for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
+ if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
+ if (decl->getName() == kHeapAllocatorName)
+ return true;
+ }
+ }
+
+ return Config::IsGCCollection(name_);
+}
+
+static bool IsGCBaseCallback(const CXXBaseSpecifier* specifier,
+ CXXBasePath& path,
+ void* data) {
+ if (CXXRecordDecl* record = specifier->getType()->getAsCXXRecordDecl())
+ return Config::IsGCBase(record->getName());
+ return false;
+}
+
+// Test if a record is derived from a garbage collected base.
+bool RecordInfo::IsGCDerived() {
+ // If already computed, return the known result.
+ if (base_paths_)
+ return is_gc_derived_;
+
+ base_paths_ = new CXXBasePaths(true, true, false);
+
+ if (!record_->hasDefinition())
+ return false;
+
+ // The base classes are not themselves considered garbage collected objects.
+ if (Config::IsGCBase(name_))
+ return false;
+
+ // Walk the inheritance tree to find GC base classes.
+ is_gc_derived_ = record_->lookupInBases(IsGCBaseCallback, 0, *base_paths_);
+ return is_gc_derived_;
+}
+
+bool RecordInfo::IsGCFinalized() {
+ if (!IsGCDerived())
+ return false;
+ for (CXXBasePaths::paths_iterator it = base_paths_->begin();
+ it != base_paths_->end();
+ ++it) {
+ const CXXBasePathElement& elem = (*it)[it->size() - 1];
+ CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
+ if (Config::IsGCFinalizedBase(base->getName()))
+ return true;
+ }
+ return false;
+}
+
+// A GC mixin is a class that inherits from a GC mixin base and has
+// not yet been "mixed in" with another GC base class.
+bool RecordInfo::IsGCMixin() {
+ if (!IsGCDerived() || base_paths_->begin() == base_paths_->end())
+ return false;
+ for (CXXBasePaths::paths_iterator it = base_paths_->begin();
+ it != base_paths_->end();
+ ++it) {
+ // Get the last element of the path.
+ const CXXBasePathElement& elem = (*it)[it->size() - 1];
+ CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
+ // If it is not a mixin base we are done.
+ if (!Config::IsGCMixinBase(base->getName()))
+ return false;
+ }
+ // This is a mixin if all GC bases are mixins.
+ return true;
+}
+
+// Test if a record is allocated on the managed heap.
+bool RecordInfo::IsGCAllocated() {
+ return IsGCDerived() || IsHeapAllocatedCollection();
+}
+
+RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
+ // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
+ if (!record || Config::IsIgnoreAnnotated(record))
+ return 0;
+ Cache::iterator it = cache_.find(record);
+ if (it != cache_.end())
+ return &it->second;
+ return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
+ .first->second;
+}
+
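+// A class is treated as stack allocated if any of its bases is stack
+// allocated, or if it deletes operator new and carries the stack-allocation
+// annotation (see Config::IsStackAnnotated).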
+bool RecordInfo::IsStackAllocated() {
+ if (is_stack_allocated_ == kNotComputed) {
+ is_stack_allocated_ = kFalse;
+ for (Bases::iterator it = GetBases().begin();
+ it != GetBases().end();
+ ++it) {
+ if (it->second.info()->IsStackAllocated()) {
+ is_stack_allocated_ = kTrue;
+ return is_stack_allocated_;
+ }
+ }
+ for (CXXRecordDecl::method_iterator it = record_->method_begin();
+ it != record_->method_end();
+ ++it) {
+ if (it->getNameAsString() == kNewOperatorName &&
+ it->isDeleted() &&
+ Config::IsStackAnnotated(*it)) {
+ is_stack_allocated_ = kTrue;
+ return is_stack_allocated_;
+ }
+ }
+ }
+ return is_stack_allocated_;
+}
+
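+// A class is non-newable if it declares at least one operator new and every
+// declared operator new is deleted.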
+bool RecordInfo::IsNonNewable() {
+ if (is_non_newable_ == kNotComputed) {
+ bool deleted = false;
+ bool all_deleted = true;
+ for (CXXRecordDecl::method_iterator it = record_->method_begin();
+ it != record_->method_end();
+ ++it) {
+ if (it->getNameAsString() == kNewOperatorName) {
+ deleted = it->isDeleted();
+ all_deleted = all_deleted && deleted;
+ }
+ }
+ is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
+ }
+ return is_non_newable_;
+}
+
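+// A class is only placement-newable if its ordinary (single-argument)
+// operator new is deleted while a two-argument placement operator new is
+// available.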
+bool RecordInfo::IsOnlyPlacementNewable() {
+ if (is_only_placement_newable_ == kNotComputed) {
+ bool placement = false;
+ bool new_deleted = false;
+ for (CXXRecordDecl::method_iterator it = record_->method_begin();
+ it != record_->method_end();
+ ++it) {
+ if (it->getNameAsString() == kNewOperatorName) {
+ if (it->getNumParams() == 1) {
+ new_deleted = it->isDeleted();
+ } else if (it->getNumParams() == 2) {
+ placement = !it->isDeleted();
+ }
+ }
+ }
+ is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
+ }
+ return is_only_placement_newable_;
+}
+
+CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
+ for (CXXRecordDecl::method_iterator it = record_->method_begin();
+ it != record_->method_end();
+ ++it) {
+ if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
+ return *it;
+ }
+ return 0;
+}
+
+// An object requires a tracing method if it has any fields that need tracing
+// or if it inherits from multiple bases that need tracing.
+bool RecordInfo::RequiresTraceMethod() {
+ if (IsStackAllocated())
+ return false;
+ unsigned bases_with_trace = 0;
+ for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
+ if (it->second.NeedsTracing().IsNeeded())
+ ++bases_with_trace;
+ }
+ if (bases_with_trace > 1)
+ return true;
+ GetFields();
+ return fields_need_tracing_.IsNeeded();
+}
+
+// Get the actual tracing method (i.e., can be traceAfterDispatch if there is a
+// dispatch method).
+CXXMethodDecl* RecordInfo::GetTraceMethod() {
+ DetermineTracingMethods();
+ return trace_method_;
+}
+
+// Get the static trace dispatch method.
+CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
+ DetermineTracingMethods();
+ return trace_dispatch_method_;
+}
+
+CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
+ DetermineTracingMethods();
+ return finalize_dispatch_method_;
+}
+
+RecordInfo::Bases& RecordInfo::GetBases() {
+ if (!bases_)
+ bases_ = CollectBases();
+ return *bases_;
+}
+
+bool RecordInfo::InheritsTrace() {
+ if (GetTraceMethod())
+ return true;
+ for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
+ if (it->second.info()->InheritsTrace())
+ return true;
+ }
+ return false;
+}
+
+CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
+ if (CXXMethodDecl* trace = GetTraceMethod())
+ return trace->isVirtual() ? 0 : trace;
+ for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
+ if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
+ return trace;
+ }
+ return 0;
+}
+
+// A (non-virtual) class is considered abstract in Blink if it has no public
+// constructors (other than copy or move constructors) and no create methods.
+bool RecordInfo::IsConsideredAbstract() {
+ for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
+ it != record_->ctor_end();
+ ++it) {
+ if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
+ return false;
+ }
+ for (CXXRecordDecl::method_iterator it = record_->method_begin();
+ it != record_->method_end();
+ ++it) {
+ if (it->getNameAsString() == kCreateName)
+ return false;
+ }
+ return true;
+}
+
+RecordInfo::Bases* RecordInfo::CollectBases() {
+ // Compute the collection locally to avoid inconsistent states.
+ Bases* bases = new Bases;
+ if (!record_->hasDefinition())
+ return bases;
+ for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
+ it != record_->bases_end();
+ ++it) {
+ const CXXBaseSpecifier& spec = *it;
+ RecordInfo* info = cache_->Lookup(spec.getType());
+ if (!info)
+ continue;
+ CXXRecordDecl* base = info->record();
+ TracingStatus status = info->InheritsTrace()
+ ? TracingStatus::Needed()
+ : TracingStatus::Unneeded();
+ bases->insert(std::make_pair(base, BasePoint(spec, info, status)));
+ }
+ return bases;
+}
+
+RecordInfo::Fields& RecordInfo::GetFields() {
+ if (!fields_)
+ fields_ = CollectFields();
+ return *fields_;
+}
+
+RecordInfo::Fields* RecordInfo::CollectFields() {
+ // Compute the collection locally to avoid inconsistent states.
+ Fields* fields = new Fields;
+ if (!record_->hasDefinition())
+ return fields;
+ TracingStatus fields_status = TracingStatus::Unneeded();
+ for (RecordDecl::field_iterator it = record_->field_begin();
+ it != record_->field_end();
+ ++it) {
+ FieldDecl* field = *it;
+ // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
+ if (Config::IsIgnoreAnnotated(field))
+ continue;
+ if (Edge* edge = CreateEdge(field->getType().getTypePtrOrNull())) {
+ fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
+ fields->insert(std::make_pair(field, FieldPoint(field, edge)));
+ }
+ }
+ fields_need_tracing_ = fields_status;
+ return fields;
+}
+
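+// Determine the class's trace method, trace dispatch method, and finalize
+// dispatch method, inheriting dispatch methods from bases when the class does
+// not define them itself.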
+void RecordInfo::DetermineTracingMethods() {
+ if (determined_trace_methods_)
+ return;
+ determined_trace_methods_ = true;
+ if (Config::IsGCBase(name_))
+ return;
+ CXXMethodDecl* trace = 0;
+ CXXMethodDecl* traceAfterDispatch = 0;
+ bool isTraceAfterDispatch;
+ for (CXXRecordDecl::method_iterator it = record_->method_begin();
+ it != record_->method_end();
+ ++it) {
+ if (Config::IsTraceMethod(*it, &isTraceAfterDispatch)) {
+ if (isTraceAfterDispatch) {
+ traceAfterDispatch = *it;
+ } else {
+ trace = *it;
+ }
+ } else if (it->getNameAsString() == kFinalizeName) {
+ finalize_dispatch_method_ = *it;
+ }
+ }
+ if (traceAfterDispatch) {
+ trace_method_ = traceAfterDispatch;
+ trace_dispatch_method_ = trace;
+ } else {
+ // TODO: Is it ever possible to have a dispatch method called trace without
+ // the same class defining a traceAfterDispatch method?
+ trace_method_ = trace;
+ trace_dispatch_method_ = 0;
+ }
+ if (trace_dispatch_method_ && finalize_dispatch_method_)
+ return;
+ // If this class does not define dispatching methods inherit them.
+ for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
+ // TODO: Does it make sense to inherit multiple dispatch methods?
+ if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
+ assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
+ trace_dispatch_method_ = dispatch;
+ }
+ if (CXXMethodDecl* dispatch =
+ it->second.info()->GetFinalizeDispatchMethod()) {
+ assert(!finalize_dispatch_method_ &&
+ "Multiple finalize dispatching methods");
+ finalize_dispatch_method_ = dispatch;
+ }
+ }
+}
+
+// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
+bool RecordInfo::NeedsFinalization() {
+ if (does_need_finalization_ == kNotComputed) {
+ // Rely on hasNonTrivialDestructor(), but if the only
+ // identifiable reason for it being true is the presence
+ // of a safely ignorable class as a direct base,
+ // or we're processing such an 'ignorable' class, then it does
+ // not need finalization.
+ does_need_finalization_ =
+ record_->hasNonTrivialDestructor() ? kTrue : kFalse;
+ if (!does_need_finalization_)
+ return does_need_finalization_;
+
+ // Processing a class with a safely-ignorable destructor.
+ NamespaceDecl* ns =
+ dyn_cast<NamespaceDecl>(record_->getDeclContext());
+ if (ns && Config::HasIgnorableDestructor(ns->getName(), name_)) {
+ does_need_finalization_ = kFalse;
+ return does_need_finalization_;
+ }
+
+ CXXDestructorDecl* dtor = record_->getDestructor();
+ if (dtor && dtor->isUserProvided())
+ return does_need_finalization_;
+ for (Fields::iterator it = GetFields().begin();
+ it != GetFields().end();
+ ++it) {
+ if (it->second.edge()->NeedsFinalization())
+ return does_need_finalization_;
+ }
+
+ for (Bases::iterator it = GetBases().begin();
+ it != GetBases().end();
+ ++it) {
+ if (it->second.info()->NeedsFinalization())
+ return does_need_finalization_;
+ }
+ // Destructor was non-trivial due to bases with destructors that
+ // can be safely ignored. Hence, no need for finalization.
+ does_need_finalization_ = kFalse;
+ }
+ return does_need_finalization_;
+}
+
+// A class needs tracing if:
+// - it is allocated on the managed heap,
+// - it is derived from a class that needs tracing, or
+// - it contains fields that need tracing.
+// TODO: Defining NeedsTracing based on whether a class defines a trace method
+// (of the proper signature) over-approximates too much. The use of transition
+// types causes some classes to have trace methods without them needing to be
+// traced.
+TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
+ if (IsGCAllocated())
+ return TracingStatus::Needed();
+
+ if (IsStackAllocated())
+ return TracingStatus::Unneeded();
+
+ for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
+ if (it->second.info()->NeedsTracing(option).IsNeeded())
+ return TracingStatus::Needed();
+ }
+
+ if (option == Edge::kRecursive)
+ GetFields();
+
+ return fields_need_tracing_;
+}
+
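+// Build the edge for a field type by recursively unwrapping the recognized
+// pointer templates (RawPtr, RefPtr, OwnPtr, Member, WeakMember, Persistent)
+// and collection templates; anything else becomes a plain value edge, or is
+// ignored if it is not a C++ record.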
+Edge* RecordInfo::CreateEdge(const Type* type) {
+ if (!type) {
+ return 0;
+ }
+
+ if (type->isPointerType()) {
+ if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
+ return new RawPtr(ptr, false);
+ return 0;
+ }
+
+ RecordInfo* info = cache_->Lookup(type);
+
+ // If the type is neither a pointer nor a C++ record, we ignore it.
+ if (!info) {
+ return 0;
+ }
+
+ TemplateArgs args;
+
+ if (Config::IsRawPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
+ if (Edge* ptr = CreateEdge(args[0]))
+ return new RawPtr(ptr, true);
+ return 0;
+ }
+
+ if (Config::IsRefPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
+ if (Edge* ptr = CreateEdge(args[0]))
+ return new RefPtr(ptr);
+ return 0;
+ }
+
+ if (Config::IsOwnPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
+ if (Edge* ptr = CreateEdge(args[0]))
+ return new OwnPtr(ptr);
+ return 0;
+ }
+
+ if (Config::IsMember(info->name()) && info->GetTemplateArgs(1, &args)) {
+ if (Edge* ptr = CreateEdge(args[0]))
+ return new Member(ptr);
+ return 0;
+ }
+
+ if (Config::IsWeakMember(info->name()) && info->GetTemplateArgs(1, &args)) {
+ if (Edge* ptr = CreateEdge(args[0]))
+ return new WeakMember(ptr);
+ return 0;
+ }
+
+ if (Config::IsPersistent(info->name())) {
+ // Persistent might refer to v8::Persistent, so check the namespace.
+ // TODO: Consider using a more canonical identification than names.
+ NamespaceDecl* ns =
+ dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
+ if (!ns || ns->getName() != "blink")
+ return 0;
+ if (!info->GetTemplateArgs(1, &args))
+ return 0;
+ if (Edge* ptr = CreateEdge(args[0]))
+ return new Persistent(ptr);
+ return 0;
+ }
+
+ if (Config::IsGCCollection(info->name()) ||
+ Config::IsWTFCollection(info->name())) {
+ bool is_root = Config::IsPersistentGCCollection(info->name());
+ bool on_heap = is_root || info->IsHeapAllocatedCollection();
+ size_t count = Config::CollectionDimension(info->name());
+ if (!info->GetTemplateArgs(count, &args))
+ return 0;
+ Collection* edge = new Collection(info, on_heap, is_root);
+ for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
+ if (Edge* member = CreateEdge(*it)) {
+ edge->members().push_back(member);
+ }
+ // TODO: Handle the case where we fail to create an edge (eg, if the
+ // argument is a primitive type or just not fully known yet).
+ }
+ return edge;
+ }
+
+ return new Value(info);
+}
diff --git a/tools/clang/blink_gc_plugin/RecordInfo.h b/tools/clang/blink_gc_plugin/RecordInfo.h
new file mode 100644
index 0000000..7dd9a4e
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/RecordInfo.h
@@ -0,0 +1,174 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file provides a wrapper for CXXRecordDecl that accumulates GC related
+// information about a class. Accumulated information is memoized and the info
+// objects are stored in a RecordCache.
+
+#ifndef TOOLS_BLINK_GC_PLUGIN_RECORD_INFO_H_
+#define TOOLS_BLINK_GC_PLUGIN_RECORD_INFO_H_
+
+#include <map>
+#include <vector>
+
+#include "Edge.h"
+
+#include "clang/AST/AST.h"
+#include "clang/AST/CXXInheritance.h"
+
+class RecordCache;
+
+// A potentially traceable and/or lifetime-affecting point in the object graph.
+class GraphPoint {
+ public:
+ GraphPoint() : traced_(false) {}
+ void MarkTraced() { traced_ = true; }
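+ // A point is properly traced if it was marked as traced or does not need
+ // tracing in the first place.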
+ bool IsProperlyTraced() { return traced_ || !NeedsTracing().IsNeeded(); }
+ virtual const TracingStatus NeedsTracing() = 0;
+
+ private:
+ bool traced_;
+};
+
+class BasePoint : public GraphPoint {
+ public:
+ BasePoint(const clang::CXXBaseSpecifier& spec,
+ RecordInfo* info,
+ const TracingStatus& status)
+ : spec_(spec), info_(info), status_(status) {}
+ const TracingStatus NeedsTracing() { return status_; }
+ const clang::CXXBaseSpecifier& spec() { return spec_; }
+ RecordInfo* info() { return info_; }
+
+ private:
+ const clang::CXXBaseSpecifier& spec_;
+ RecordInfo* info_;
+ TracingStatus status_;
+};
+
+class FieldPoint : public GraphPoint {
+ public:
+ FieldPoint(clang::FieldDecl* field, Edge* edge)
+ : field_(field), edge_(edge) {}
+ const TracingStatus NeedsTracing() {
+ return edge_->NeedsTracing(Edge::kRecursive);
+ }
+ clang::FieldDecl* field() { return field_; }
+ Edge* edge() { return edge_; }
+
+ private:
+ clang::FieldDecl* field_;
+ Edge* edge_;
+
+ friend class RecordCache;
+ void deleteEdge() { delete edge_; }
+};
+
+// Wrapper class to lazily collect information about a C++ record.
+class RecordInfo {
+ public:
+ typedef std::map<clang::CXXRecordDecl*, BasePoint> Bases;
+ typedef std::map<clang::FieldDecl*, FieldPoint> Fields;
+ typedef std::vector<const clang::Type*> TemplateArgs;
+
+ ~RecordInfo();
+
+ clang::CXXRecordDecl* record() const { return record_; }
+ const std::string& name() const { return name_; }
+ Fields& GetFields();
+ Bases& GetBases();
+ clang::CXXMethodDecl* GetTraceMethod();
+ clang::CXXMethodDecl* GetTraceDispatchMethod();
+ clang::CXXMethodDecl* GetFinalizeDispatchMethod();
+
+ bool GetTemplateArgs(size_t count, TemplateArgs* output_args);
+
+ bool IsHeapAllocatedCollection();
+ bool IsGCDerived();
+ bool IsGCAllocated();
+ bool IsGCFinalized();
+ bool IsGCMixin();
+ bool IsStackAllocated();
+ bool IsNonNewable();
+ bool IsOnlyPlacementNewable();
+ clang::CXXMethodDecl* DeclaresNewOperator();
+
+ bool RequiresTraceMethod();
+ bool NeedsFinalization();
+ TracingStatus NeedsTracing(Edge::NeedsTracingOption);
+ clang::CXXMethodDecl* InheritsNonVirtualTrace();
+ bool IsConsideredAbstract();
+
+ private:
+ RecordInfo(clang::CXXRecordDecl* record, RecordCache* cache);
+
+ Fields* CollectFields();
+ Bases* CollectBases();
+ void DetermineTracingMethods();
+ bool InheritsTrace();
+
+ Edge* CreateEdge(const clang::Type* type);
+
+ RecordCache* cache_;
+ clang::CXXRecordDecl* record_;
+ const std::string name_;
+ TracingStatus fields_need_tracing_;
+ Bases* bases_;
+ Fields* fields_;
+
+ enum CachedBool { kFalse = 0, kTrue = 1, kNotComputed = 2 };
+ CachedBool is_stack_allocated_;
+ CachedBool is_non_newable_;
+ CachedBool is_only_placement_newable_;
+ CachedBool does_need_finalization_;
+
+ bool determined_trace_methods_;
+ clang::CXXMethodDecl* trace_method_;
+ clang::CXXMethodDecl* trace_dispatch_method_;
+ clang::CXXMethodDecl* finalize_dispatch_method_;
+
+ bool is_gc_derived_;
+ clang::CXXBasePaths* base_paths_;
+
+ friend class RecordCache;
+};
+
+class RecordCache {
+ public:
+ RecordInfo* Lookup(clang::CXXRecordDecl* record);
+
+ RecordInfo* Lookup(const clang::CXXRecordDecl* record) {
+ return Lookup(const_cast<clang::CXXRecordDecl*>(record));
+ }
+
+ RecordInfo* Lookup(clang::DeclContext* decl) {
+ return Lookup(clang::dyn_cast<clang::CXXRecordDecl>(decl));
+ }
+
+ RecordInfo* Lookup(const clang::Type* type) {
+ return Lookup(type->getAsCXXRecordDecl());
+ }
+
+ RecordInfo* Lookup(const clang::QualType& type) {
+ return Lookup(type.getTypePtr());
+ }
+
+ ~RecordCache() {
+ for (Cache::iterator it = cache_.begin(); it != cache_.end(); ++it) {
+ if (!it->second.fields_)
+ continue;
+ for (RecordInfo::Fields::iterator fit = it->second.fields_->begin();
+ fit != it->second.fields_->end();
+ ++fit) {
+ fit->second.deleteEdge();
+ }
+ }
+ }
+
+ private:
+ typedef std::map<clang::CXXRecordDecl*, RecordInfo> Cache;
+ Cache cache_;
+};
+
+#endif // TOOLS_BLINK_GC_PLUGIN_RECORD_INFO_H_
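The central idea of this header is that per-record information is computed lazily and memoized, with one RecordInfo per declaration held in a RecordCache map. That lookup-or-create pattern can be shown without any clang dependency; in the minimal sketch below all class names are hypothetical stand-ins rather than the plugin's types.

#include <iostream>
#include <map>
#include <string>

// Stand-in for RecordInfo: computes a property on first request, then caches it.
class Info {
 public:
  explicit Info(const std::string& name)
      : name_(name), computed_(false), needs_tracing_(false) {}
  bool NeedsTracing() {
    if (!computed_) {  // lazy, memoized computation
      needs_tracing_ = name_.find("Heap") != std::string::npos;
      computed_ = true;
    }
    return needs_tracing_;
  }

 private:
  std::string name_;
  bool computed_;
  bool needs_tracing_;
};

// Stand-in for RecordCache: one Info per key, created on first Lookup.
class Cache {
 public:
  Info* Lookup(const std::string& key) {
    std::map<std::string, Info>::iterator it = cache_.find(key);
    if (it == cache_.end())
      it = cache_.emplace(key, Info(key)).first;
    return &it->second;
  }

 private:
  std::map<std::string, Info> cache_;
};

int main() {
  Cache cache;
  Info* a = cache.Lookup("HeapObject");
  Info* b = cache.Lookup("HeapObject");  // returns the same memoized object
  std::cout << (a == b) << " " << a->NeedsTracing() << "\n";  // prints "1 1"
}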
diff --git a/tools/clang/blink_gc_plugin/TracingStatus.h b/tools/clang/blink_gc_plugin/TracingStatus.h
new file mode 100644
index 0000000..9eb1080
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/TracingStatus.h
@@ -0,0 +1,29 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BLINK_GC_PLUGIN_TRACING_STATUS_H_
+#define TOOLS_BLINK_GC_PLUGIN_TRACING_STATUS_H_
+
+// TracingStatus is a three-point value ordered by unneeded < unknown < needed.
+class TracingStatus {
+ public:
+ static TracingStatus Unneeded() { return kUnneeded; }
+ static TracingStatus Unknown() { return kUnknown; }
+ static TracingStatus Needed() { return kNeeded; }
+ bool IsUnneeded() const { return status_ == kUnneeded; }
+ bool IsUnknown() const { return status_ == kUnknown; }
+ bool IsNeeded() const { return status_ == kNeeded; }
+ TracingStatus LUB(const TracingStatus& other) const {
+ return status_ > other.status_ ? status_ : other.status_;
+ }
+ bool operator==(const TracingStatus& other) const {
+ return status_ == other.status_;
+ }
+ private:
+ enum Status { kUnneeded, kUnknown, kNeeded };
+ TracingStatus(Status status) : status_(status) {}
+ Status status_;
+};
+
+#endif // TOOLS_BLINK_GC_PLUGIN_TRACING_STATUS_H_
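The LUB (least upper bound) operation makes TracingStatus a join over the ordering unneeded < unknown < needed: joining anything with Needed yields Needed, and Unknown only dominates Unneeded. A small self-contained check of that ordering, re-declaring an equivalent enum locally instead of including the header:

#include <algorithm>
#include <cassert>

// Mirror of the three-point ordering used by TracingStatus.
enum Status { kUnneeded, kUnknown, kNeeded };

// Least upper bound in the ordering unneeded < unknown < needed.
Status LUB(Status a, Status b) { return std::max(a, b); }

int main() {
  assert(LUB(kUnneeded, kUnknown) == kUnknown);
  assert(LUB(kUnknown, kNeeded) == kNeeded);
  assert(LUB(kUnneeded, kUnneeded) == kUnneeded);
  // Folding field statuses: one needed field makes the whole record needed.
  Status fields[] = {kUnneeded, kUnknown, kNeeded};
  Status total = kUnneeded;
  for (Status s : fields)
    total = LUB(total, s);
  assert(total == kNeeded);
  return 0;
}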
diff --git a/tools/clang/blink_gc_plugin/process-graph.py b/tools/clang/blink_gc_plugin/process-graph.py
new file mode 100755
index 0000000..b4fb1e6
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/process-graph.py
@@ -0,0 +1,464 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse, os, sys, json, subprocess, pickle, StringIO
+
+parser = argparse.ArgumentParser(
+ description =
+ "Process the Blink points-to graph generated by the Blink GC plugin.")
+
+parser.add_argument(
+ '-', dest='use_stdin', action='store_true',
+ help='Read JSON graph files from stdin')
+
+parser.add_argument(
+ '-c', '--detect-cycles', action='store_true',
+ help='Detect cycles containing GC roots')
+
+parser.add_argument(
+ '-s', '--print-stats', action='store_true',
+ help='Statistics about ref-counted and traced objects')
+
+parser.add_argument(
+ '-v', '--verbose', action='store_true',
+ help='Verbose output')
+
+parser.add_argument(
+ '--ignore-cycles', default=None, metavar='FILE',
+ help='File with cycles to ignore')
+
+parser.add_argument(
+ '--ignore-classes', nargs='*', default=[], metavar='CLASS',
+ help='Classes to ignore when detecting cycles')
+
+parser.add_argument(
+ '--pickle-graph', default=None, metavar='FILE',
+ help='File to read/save the graph from/to')
+
+parser.add_argument(
+ 'files', metavar='FILE_OR_DIR', nargs='*', default=[],
+ help='JSON graph files or directories containing them')
+
+# Command line args after parsing.
+args = None
+
+# Map from node labels to nodes.
+graph = {}
+
+# Set of root nodes.
+roots = []
+
+# List of cycles to ignore.
+ignored_cycles = []
+
+# Global flag to determine exit code.
+global_reported_error = False
+
+def set_reported_error(value):
+ global global_reported_error
+ global_reported_error = value
+
+def reported_error():
+ return global_reported_error
+
+def log(msg):
+ if args.verbose:
+ print msg
+
+global_inc_copy = 0
+def inc_copy():
+ global global_inc_copy
+ global_inc_copy += 1
+
+def get_node(name):
+ return graph.setdefault(name, Node(name))
+
+ptr_types = ('raw', 'ref', 'mem')
+
+def inc_ptr(dst, ptr):
+ if ptr in ptr_types:
+ node = graph.get(dst)
+ if not node: return
+ node.counts[ptr] += 1
+
+def add_counts(s1, s2):
+ for (k, v) in s2.iteritems():
+    s1[k] += v
+
+# Representation of graph nodes. Basically a map of directed edges.
+class Node:
+ def __init__(self, name):
+ self.name = name
+ self.edges = {}
+ self.reset()
+ def __repr__(self):
+ return "%s(%s) %s" % (self.name, self.visited, self.edges)
+ def update_node(self, decl):
+ # Currently we don't track any node info besides its edges.
+ pass
+ def update_edge(self, e):
+ new_edge = Edge(**e)
+ edge = self.edges.get(new_edge.key)
+ if edge:
+      # If an edge exists, its kind is the strongest of the two.
+ edge.kind = max(edge.kind, new_edge.kind)
+ else:
+ self.edges[new_edge.key] = new_edge
+ def super_edges(self):
+ return [ e for e in self.edges.itervalues() if e.is_super() ]
+ def subclass_edges(self):
+ return [ e for e in self.edges.itervalues() if e.is_subclass() ]
+ def reset(self):
+ self.cost = sys.maxint
+ self.visited = False
+ self.path = None
+ self.counts = {}
+ for ptr in ptr_types:
+ self.counts[ptr] = 0
+ def update_counts(self):
+ for e in self.edges.itervalues():
+ inc_ptr(e.dst, e.ptr)
+
+# Representation of directed graph edges.
+class Edge:
+ def __init__(self, **decl):
+ self.src = decl['src']
+ self.dst = decl['dst']
+ self.lbl = decl['lbl']
+ self.ptr = decl['ptr']
+ self.kind = decl['kind'] # 0 = weak, 1 = strong, 2 = root
+ self.loc = decl['loc']
+ # The label does not uniquely determine an edge from a node. We
+ # define the semi-unique key to be the concatenation of the
+ # label and dst name. This is sufficient to track the strongest
+ # edge to a particular type. For example, if the field A::m_f
+ # has type HashMap<WeakMember<B>, Member<B>> we will have a
+ # strong edge with key m_f#B from A to B.
+ self.key = '%s#%s' % (self.lbl, self.dst)
+ def __repr__(self):
+ return '%s (%s) => %s' % (self.src, self.lbl, self.dst)
+ def is_root(self):
+ return self.kind == 2
+ def is_weak(self):
+ return self.kind == 0
+ def keeps_alive(self):
+ return self.kind > 0
+ def is_subclass(self):
+ return self.lbl.startswith('<subclass>')
+ def is_super(self):
+ return self.lbl.startswith('<super>')
+
+def parse_file(filename):
+ obj = json.load(open(filename))
+ return obj
+
+def build_graphs_in_dir(dirname):
+  # TODO: Use platform-independent code, e.g., os.walk
+ files = subprocess.check_output(
+ ['find', dirname, '-name', '*.graph.json']).split('\n')
+ log("Found %d files" % len(files))
+ for f in files:
+    f = f.strip()
+ if len(f) < 1:
+ continue
+ build_graph(f)
+
+def build_graph(filename):
+ for decl in parse_file(filename):
+ if decl.has_key('name'):
+ # Add/update a node entry
+ name = decl['name']
+ node = get_node(name)
+ node.update_node(decl)
+ else:
+ # Add/update an edge entry
+ name = decl['src']
+ node = get_node(name)
+ node.update_edge(decl)
+
+# Copy all non-weak edges from super classes to their subclasses.
+# This causes all fields of a super to be considered fields of a
+# derived class without transitively relating derived classes with
+# each other. For example, if B <: A, C <: A, and for some D, D => B,
+# we don't want that to entail that D => C.
+def copy_super_edges(edge):
+ if edge.is_weak() or not edge.is_super():
+ return
+ inc_copy()
+ # Make the super-class edge weak (prohibits processing twice).
+ edge.kind = 0
+ # If the super class is not in our graph exit early.
+ super_node = graph.get(edge.dst)
+ if super_node is None: return
+ # Recursively copy all super-class edges.
+ for e in super_node.super_edges():
+ copy_super_edges(e)
+ # Copy strong super-class edges (ignoring sub-class edges) to the sub class.
+ sub_node = graph[edge.src]
+ for e in super_node.edges.itervalues():
+ if e.keeps_alive() and not e.is_subclass():
+ new_edge = Edge(
+ src = sub_node.name,
+ dst = e.dst,
+ lbl = '%s <: %s' % (super_node.name, e.lbl),
+ ptr = e.ptr,
+ kind = e.kind,
+ loc = e.loc,
+ )
+ sub_node.edges[new_edge.key] = new_edge
+ # Add a strong sub-class edge.
+ sub_edge = Edge(
+ src = super_node.name,
+ dst = sub_node.name,
+ lbl = '<subclass>',
+ ptr = edge.ptr,
+ kind = 1,
+ loc = edge.loc,
+ )
+ super_node.edges[sub_edge.key] = sub_edge
+
+def complete_graph():
+ for node in graph.itervalues():
+ for edge in node.super_edges():
+ copy_super_edges(edge)
+ for edge in node.edges.itervalues():
+ if edge.is_root():
+ roots.append(edge)
+ log("Copied edges down <super> edges for %d graph nodes" % global_inc_copy)
+
+def reset_graph():
+ for n in graph.itervalues():
+ n.reset()
+
+def shortest_path(start, end):
+ start.cost = 0
+ minlist = [start]
+ while len(minlist) > 0:
+ minlist.sort(key=lambda n: -n.cost)
+ current = minlist.pop()
+ current.visited = True
+ if current == end or current.cost >= end.cost + 1:
+ return
+ for e in current.edges.itervalues():
+ if not e.keeps_alive():
+ continue
+ dst = graph.get(e.dst)
+ if dst is None or dst.visited:
+ continue
+ if current.cost < dst.cost:
+ dst.cost = current.cost + 1
+ dst.path = e
+ minlist.append(dst)
+
+def detect_cycles():
+ for root_edge in roots:
+ reset_graph()
+ # Mark ignored classes as already visited
+ for ignore in args.ignore_classes:
+ name = ignore.find("::") > 0 and ignore or ("blink::" + ignore)
+ node = graph.get(name)
+ if node:
+ node.visited = True
+ src = graph[root_edge.src]
+ dst = graph.get(root_edge.dst)
+ if src.visited:
+ continue
+ if root_edge.dst == "WTF::String":
+ continue
+ if dst is None:
+ print "\nPersistent root to incomplete destination object:"
+ print root_edge
+ set_reported_error(True)
+ continue
+ # Find the shortest path from the root target (dst) to its host (src)
+ shortest_path(dst, src)
+ if src.cost < sys.maxint:
+ report_cycle(root_edge)
+
+def is_ignored_cycle(cycle):
+ for block in ignored_cycles:
+ if block_match(cycle, block):
+ return True
+
+def block_match(b1, b2):
+ if len(b1) != len(b2):
+ return False
+ for (l1, l2) in zip(b1, b2):
+ if l1 != l2:
+ return False
+ return True
+
+def report_cycle(root_edge):
+ dst = graph[root_edge.dst]
+ path = []
+ edge = root_edge
+ dst.path = None
+ while edge:
+ path.append(edge)
+ edge = graph[edge.src].path
+ path.append(root_edge)
+ path.reverse()
+ # Find the max loc length for pretty printing.
+ max_loc = 0
+ for p in path:
+ if len(p.loc) > max_loc:
+ max_loc = len(p.loc)
+ out = StringIO.StringIO()
+ for p in path[:-1]:
+ print >>out, (p.loc + ':').ljust(max_loc + 1), p
+ sout = out.getvalue()
+ if not is_ignored_cycle(sout):
+ print "\nFound a potentially leaking cycle starting from a GC root:\n", sout
+ set_reported_error(True)
+
+def load_graph():
+ global graph
+ global roots
+ log("Reading graph from pickled file: " + args.pickle_graph)
+ dump = pickle.load(open(args.pickle_graph, 'rb'))
+ graph = dump[0]
+ roots = dump[1]
+
+def save_graph():
+ log("Saving graph to pickle file: " + args.pickle_graph)
+ dump = (graph, roots)
+ pickle.dump(dump, open(args.pickle_graph, 'wb'))
+
+def read_ignored_cycles():
+ global ignored_cycles
+ if not args.ignore_cycles:
+ return
+ log("Reading ignored cycles from file: " + args.ignore_cycles)
+ block = []
+ for l in open(args.ignore_cycles):
+ line = l.strip()
+ if not line or line.startswith('Found'):
+ if len(block) > 0:
+ ignored_cycles.append(block)
+ block = []
+ else:
+ block += l
+ if len(block) > 0:
+ ignored_cycles.append(block)
+
+gc_bases = (
+ 'blink::GarbageCollected',
+ 'blink::GarbageCollectedFinalized',
+ 'blink::GarbageCollectedMixin',
+)
+ref_bases = (
+ 'WTF::RefCounted',
+ 'WTF::ThreadSafeRefCounted',
+)
+gcref_bases = (
+ 'blink::RefCountedGarbageCollected',
+ 'blink::ThreadSafeRefCountedGarbageCollected',
+)
+ref_mixins = (
+ 'blink::EventTarget',
+ 'blink::EventTargetWithInlineData',
+ 'blink::ActiveDOMObject',
+)
+
+def print_stats():
+ gcref_managed = []
+ ref_managed = []
+ gc_managed = []
+ hierarchies = []
+
+ for node in graph.itervalues():
+ node.update_counts()
+ for sup in node.super_edges():
+ if sup.dst in gcref_bases:
+ gcref_managed.append(node)
+ elif sup.dst in ref_bases:
+ ref_managed.append(node)
+ elif sup.dst in gc_bases:
+ gc_managed.append(node)
+
+  groups = [("GC managed   ", gc_managed),
+ ("ref counted ", ref_managed),
+ ("in transition", gcref_managed)]
+ total = sum([len(g) for (s,g) in groups])
+ for (s, g) in groups:
+ percent = len(g) * 100 / total
+ print "%2d%% is %s (%d hierarchies)" % (percent, s, len(g))
+
+ for base in gcref_managed:
+ stats = dict({ 'classes': 0, 'ref-mixins': 0 })
+ for ptr in ptr_types: stats[ptr] = 0
+ hierarchy_stats(base, stats)
+ hierarchies.append((base, stats))
+
+ print "\nHierarchies in transition (RefCountedGarbageCollected):"
+ hierarchies.sort(key=lambda (n,s): -s['classes'])
+ for (node, stats) in hierarchies:
+ total = stats['mem'] + stats['ref'] + stats['raw']
+ print (
+ "%s %3d%% of %-30s: %3d cls, %3d mem, %3d ref, %3d raw, %3d ref-mixins" %
+ (stats['ref'] == 0 and stats['ref-mixins'] == 0 and "*" or " ",
+ total == 0 and 100 or stats['mem'] * 100 / total,
+ node.name.replace('blink::', ''),
+ stats['classes'],
+ stats['mem'],
+ stats['ref'],
+ stats['raw'],
+ stats['ref-mixins'],
+ ))
+
+def hierarchy_stats(node, stats):
+ if not node: return
+ stats['classes'] += 1
+ add_counts(stats, node.counts)
+ for edge in node.super_edges():
+ if edge.dst in ref_mixins:
+ stats['ref-mixins'] += 1
+ for edge in node.subclass_edges():
+ hierarchy_stats(graph.get(edge.dst), stats)
+
+def main():
+ global args
+ args = parser.parse_args()
+ if not (args.detect_cycles or args.print_stats):
+ print "Please select an operation to perform (eg, -c to detect cycles)"
+ parser.print_help()
+ return 1
+ if args.pickle_graph and os.path.isfile(args.pickle_graph):
+ load_graph()
+ else:
+ if args.use_stdin:
+ log("Reading files from stdin")
+ for f in sys.stdin:
+ build_graph(f.strip())
+ else:
+ log("Reading files and directories from command line")
+ if len(args.files) == 0:
+ print "Please provide files or directores for building the graph"
+ parser.print_help()
+ return 1
+ for f in args.files:
+ if os.path.isdir(f):
+ log("Building graph from files in directory: " + f)
+ build_graphs_in_dir(f)
+ else:
+ log("Building graph from file: " + f)
+ build_graph(f)
+ log("Completing graph construction (%d graph nodes)" % len(graph))
+ complete_graph()
+ if args.pickle_graph:
+ save_graph()
+ if args.detect_cycles:
+ read_ignored_cycles()
+ log("Detecting cycles containg GC roots")
+ detect_cycles()
+ if args.print_stats:
+ log("Printing statistics")
+ print_stats()
+ if reported_error():
+ return 1
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
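For each root (Persistent) edge, detect_cycles searches for a keeps-alive path from the edge's destination back to its source and reports a cycle when one exists. The C++ sketch below illustrates that reachability check on a toy graph; the graph, names, and plain BFS are assumptions made for illustration, whereas the script itself runs a cost-based shortest-path search over the graph it builds from the JSON dumps.

#include <iostream>
#include <map>
#include <queue>
#include <set>
#include <string>
#include <vector>

// Adjacency lists over keeps-alive (strong) edges only.
typedef std::map<std::string, std::vector<std::string> > Graph;

// True if 'to' is reachable from 'from' over strong edges (simple BFS).
bool Reachable(const Graph& g, const std::string& from, const std::string& to) {
  std::set<std::string> seen;
  std::queue<std::string> work;
  seen.insert(from);
  work.push(from);
  while (!work.empty()) {
    std::string cur = work.front();
    work.pop();
    if (cur == to)
      return true;
    Graph::const_iterator it = g.find(cur);
    if (it == g.end())
      continue;
    for (size_t i = 0; i < it->second.size(); ++i)
      if (seen.insert(it->second[i]).second)
        work.push(it->second[i]);
  }
  return false;
}

int main() {
  // E holds Persistent<A> (root edge E -> A); A -> B -> C -> E are strong edges.
  Graph strong;
  strong["A"].push_back("B");
  strong["B"].push_back("C");
  strong["C"].push_back("E");
  std::string root_src = "E", root_dst = "A";
  if (Reachable(strong, root_dst, root_src))
    std::cout << "Potentially leaking cycle through the Persistent in "
              << root_src << "\n";
}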
diff --git a/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.cpp b/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.cpp
new file mode 100644
index 0000000..cd38ec9
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.cpp
@@ -0,0 +1,19 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base_class_must_define_virtual_trace.h"
+
+namespace blink {
+
+void PartDerived::trace(Visitor* visitor)
+{
+}
+
+void HeapDerived::trace(Visitor* visitor)
+{
+ visitor->trace(m_part);
+}
+
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.h b/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.h
new file mode 100644
index 0000000..16558bb
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.h
@@ -0,0 +1,38 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BASE_CLASS_MUST_DEFINE_VIRTUAL_TRACE_H_
+#define BASE_CLASS_MUST_DEFINE_VIRTUAL_TRACE_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class PartBase {
+ DISALLOW_ALLOCATION();
+ // Missing virtual trace.
+};
+
+class PartDerived : public PartBase {
+ DISALLOW_ALLOCATION();
+public:
+ virtual void trace(Visitor*);
+};
+
+class HeapBase : public GarbageCollected<HeapBase> {
+ // Missing virtual trace.
+};
+
+
+class HeapDerived : public HeapBase {
+public:
+ virtual void trace(Visitor*);
+private:
+ PartDerived m_part;
+};
+
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.txt b/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.txt
new file mode 100644
index 0000000..f8276eb
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/base_class_must_define_virtual_trace.txt
@@ -0,0 +1,8 @@
+In file included from base_class_must_define_virtual_trace.cpp:5:
+./base_class_must_define_virtual_trace.h:12:1: warning: [blink-gc] Left-most base class 'PartBase' of derived class 'PartDerived' must define a virtual trace method.
+class PartBase {
+^
+./base_class_must_define_virtual_trace.h:23:1: warning: [blink-gc] Left-most base class 'HeapBase' of derived class 'HeapDerived' must define a virtual trace method.
+class HeapBase : public GarbageCollected<HeapBase> {
+^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/base_requires_tracing.cpp b/tools/clang/blink_gc_plugin/tests/base_requires_tracing.cpp
new file mode 100644
index 0000000..87559a8
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/base_requires_tracing.cpp
@@ -0,0 +1,21 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base_requires_tracing.h"
+
+namespace blink {
+
+void A::trace(Visitor* visitor) { }
+
+void C::trace(Visitor* visitor) {
+ visitor->trace(m_a);
+ // Missing B::trace(visitor)
+}
+
+void D::trace(Visitor* visitor) {
+ visitor->trace(m_a);
+ C::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/base_requires_tracing.h b/tools/clang/blink_gc_plugin/tests/base_requires_tracing.h
new file mode 100644
index 0000000..0205a08
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/base_requires_tracing.h
@@ -0,0 +1,37 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BASE_REQUIRES_TRACING_H_
+#define BASE_REQUIRES_TRACING_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A : public GarbageCollected<A> {
+public:
+ virtual void trace(Visitor*);
+};
+
+class B : public A {
+ // Does not need trace
+};
+
+class C : public B {
+public:
+ void trace(Visitor*);
+private:
+ Member<A> m_a;
+};
+
+class D : public C {
+public:
+ void trace(Visitor*);
+private:
+ Member<A> m_a;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/base_requires_tracing.txt b/tools/clang/blink_gc_plugin/tests/base_requires_tracing.txt
new file mode 100644
index 0000000..ee525b9
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/base_requires_tracing.txt
@@ -0,0 +1,4 @@
+base_requires_tracing.cpp:11:1: warning: [blink-gc] Base class 'B' of derived class 'C' requires tracing.
+void C::trace(Visitor* visitor) {
+^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.cpp b/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.cpp
new file mode 100644
index 0000000..9c51eca
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.cpp
@@ -0,0 +1,22 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "class_does_not_require_finalization.h"
+
+namespace blink {
+
+void DoesNotNeedFinalizer::trace(Visitor* visitor)
+{
+}
+
+DoesNotNeedFinalizer2::~DoesNotNeedFinalizer2()
+{
+}
+
+void DoesNotNeedFinalizer2::trace(Visitor* visitor)
+{
+}
+
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.flags b/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.flags
new file mode 100644
index 0000000..b0bf138
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-blink-gc-plugin -Xclang warn-unneeded-finalizer
\ No newline at end of file
diff --git a/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.h b/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.h
new file mode 100644
index 0000000..000dfd7
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.h
@@ -0,0 +1,45 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CLASS_DOES_NOT_REQUIRE_FINALIZATION_BASE_H_
+#define CLASS_DOES_NOT_REQUIRE_FINALIZATION_BASE_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class DoesNeedFinalizer : public GarbageCollectedFinalized<DoesNeedFinalizer> {
+public:
+ ~DoesNeedFinalizer() { ; }
+ void trace(Visitor*);
+};
+
+class DoesNotNeedFinalizer
+ : public GarbageCollectedFinalized<DoesNotNeedFinalizer> {
+public:
+ void trace(Visitor*);
+};
+
+class DoesNotNeedFinalizer2
+ : public GarbageCollectedFinalized<DoesNotNeedFinalizer2> {
+public:
+ ~DoesNotNeedFinalizer2();
+ void trace(Visitor*);
+};
+
+class HasEmptyDtor {
+public:
+ virtual ~HasEmptyDtor() { }
+};
+
+class DoesNotNeedFinalizer3
+ : public GarbageCollectedFinalized<DoesNotNeedFinalizer3>,
+ public HasEmptyDtor {
+public:
+ void trace(Visitor*);
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.txt b/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.txt
new file mode 100644
index 0000000..07b833b
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_does_not_require_finalization.txt
@@ -0,0 +1,11 @@
+In file included from class_does_not_require_finalization.cpp:5:
+./class_does_not_require_finalization.h:18:1: warning: [blink-gc] Class 'DoesNotNeedFinalizer' may not require finalization.
+class DoesNotNeedFinalizer
+^
+./class_does_not_require_finalization.h:24:1: warning: [blink-gc] Class 'DoesNotNeedFinalizer2' may not require finalization.
+class DoesNotNeedFinalizer2
+^
+./class_does_not_require_finalization.h:36:1: warning: [blink-gc] Class 'DoesNotNeedFinalizer3' may not require finalization.
+class DoesNotNeedFinalizer3
+^
+3 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.cpp b/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.cpp
new file mode 100644
index 0000000..4afc011
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.cpp
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "class_multiple_trace_bases.h"
+
+namespace blink {
+
+void Base::trace(Visitor* visitor) { }
+
+void Mixin::trace(Visitor* visitor) { }
+
+// Missing: Derived::trace(visitor)
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.h b/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.h
new file mode 100644
index 0000000..b362ec9
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.h
@@ -0,0 +1,29 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CLASS_MULTIPLE_TRACE_BASES_H_
+#define CLASS_MULTIPLE_TRACE_BASES_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class Base : public GarbageCollected<Base> {
+public:
+ virtual void trace(Visitor*);
+};
+
+class Mixin : public GarbageCollectedMixin {
+public:
+ void trace(Visitor*);
+};
+
+class Derived : public Base, public Mixin {
+ USING_GARBAGE_COLLECTED_MIXIN(HeapObject);
+ // Requires trace method.
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.txt b/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.txt
new file mode 100644
index 0000000..2e697fe
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_multiple_trace_bases.txt
@@ -0,0 +1,11 @@
+In file included from class_multiple_trace_bases.cpp:5:
+./class_multiple_trace_bases.h:22:1: warning: [blink-gc] Class 'Derived' requires a trace method.
+class Derived : public Base, public Mixin {
+^
+./class_multiple_trace_bases.h:22:17: note: [blink-gc] Untraced base class 'Base' declared here:
+class Derived : public Base, public Mixin {
+ ^
+./class_multiple_trace_bases.h:22:30: note: [blink-gc] Untraced base class 'Mixin' declared here:
+class Derived : public Base, public Mixin {
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/class_overrides_new.cpp b/tools/clang/blink_gc_plugin/tests/class_overrides_new.cpp
new file mode 100644
index 0000000..9f47f82
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_overrides_new.cpp
@@ -0,0 +1,7 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "class_overrides_new.h"
+
+// Nothing to define.
diff --git a/tools/clang/blink_gc_plugin/tests/class_overrides_new.h b/tools/clang/blink_gc_plugin/tests/class_overrides_new.h
new file mode 100644
index 0000000..3e80e37
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_overrides_new.h
@@ -0,0 +1,20 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CLASS_OVERRIDES_NEW_H_
+#define CLASS_OVERRIDES_NEW_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> {
+ WTF_MAKE_FAST_ALLOCATED;
+public:
+ void trace(Visitor*) { }
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/class_overrides_new.txt b/tools/clang/blink_gc_plugin/tests/class_overrides_new.txt
new file mode 100644
index 0000000..17f50fe
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_overrides_new.txt
@@ -0,0 +1,8 @@
+In file included from class_overrides_new.cpp:5:
+./class_overrides_new.h:13:5: warning: [blink-gc] Garbage collected class 'HeapObject' is not permitted to override its new operator.
+ WTF_MAKE_FAST_ALLOCATED;
+ ^
+./heap/stubs.h:14:5: note: expanded from macro 'WTF_MAKE_FAST_ALLOCATED'
+ void* operator new(size_t size); \
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.cpp b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.cpp
new file mode 100644
index 0000000..99ccbb6
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.cpp
@@ -0,0 +1,29 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "class_requires_finalization_base.h"
+
+namespace blink {
+
+void NeedsFinalizer::trace(Visitor* visitor)
+{
+ A::trace(visitor);
+}
+
+void DoesNotNeedFinalizer::trace(Visitor* visitor)
+{
+ A::trace(visitor);
+}
+
+void GCedClassWithAScriptWrappableBase::trace(Visitor* visitor)
+{
+ A::trace(visitor);
+}
+
+void GCedClassWithAScriptWrappableAndAFinalizableBase::trace(Visitor* visitor)
+{
+ GCedClassWithAScriptWrappableBase::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.h b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.h
new file mode 100644
index 0000000..f6c0c28
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.h
@@ -0,0 +1,50 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CLASS_REQUIRES_FINALIZATION_BASE_H_
+#define CLASS_REQUIRES_FINALIZATION_BASE_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A : public GarbageCollected<A> {
+public:
+ virtual void trace(Visitor*) {}
+};
+
+class B {
+public:
+ ~B() { /* user-declared, thus, non-trivial */ }
+};
+
+// Second base class needs finalization.
+class NeedsFinalizer : public A, public B {
+public:
+ void trace(Visitor*);
+};
+
+// Base does not need finalization.
+class DoesNotNeedFinalizer : public A {
+public:
+ void trace(Visitor*);
+};
+
+class GCedClassWithAScriptWrappableBase
+ : public A, public ScriptWrappable {
+public:
+ void trace(Visitor*);
+};
+
+class GCedClassWithAScriptWrappableAndAFinalizableBase
+ : public GCedClassWithAScriptWrappableBase
+ , public B
+ , public ScriptWrappable {
+public:
+ void trace(Visitor*);
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.txt b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.txt
new file mode 100644
index 0000000..36b5431
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_base.txt
@@ -0,0 +1,14 @@
+In file included from class_requires_finalization_base.cpp:5:
+./class_requires_finalization_base.h:23:1: warning: [blink-gc] Class 'NeedsFinalizer' requires finalization.
+class NeedsFinalizer : public A, public B {
+^
+./class_requires_finalization_base.h:23:34: note: [blink-gc] Base class 'B' requiring finalization declared here:
+class NeedsFinalizer : public A, public B {
+ ^
+./class_requires_finalization_base.h:40:1: warning: [blink-gc] Class 'GCedClassWithAScriptWrappableAndAFinalizableBase' requires finalization.
+class GCedClassWithAScriptWrappableAndAFinalizableBase
+^
+./class_requires_finalization_base.h:42:7: note: [blink-gc] Base class 'B' requiring finalization declared here:
+ , public B
+ ^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.cpp b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.cpp
new file mode 100644
index 0000000..eb23ab0
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.cpp
@@ -0,0 +1,34 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "class_requires_finalization_field.h"
+
+namespace blink {
+
+void NeedsFinalizer::trace(Visitor* visitor)
+{
+ visitor->trace(m_as);
+ A::trace(visitor);
+}
+
+void AlsoNeedsFinalizer::trace(Visitor* visitor)
+{
+ visitor->trace(m_bs);
+ A::trace(visitor);
+}
+
+void DoesNotNeedFinalizer::trace(Visitor* visitor)
+{
+ visitor->trace(m_bs);
+ A::trace(visitor);
+}
+
+void AlsoDoesNotNeedFinalizer::trace(Visitor* visitor)
+{
+ visitor->trace(m_as);
+ visitor->trace(m_cs);
+ A::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.h b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.h
new file mode 100644
index 0000000..33f43c2
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.h
@@ -0,0 +1,80 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CLASS_REQUIRES_FINALIZATION_H_
+#define CLASS_REQUIRES_FINALIZATION_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A : public GarbageCollected<A> {
+public:
+ virtual void trace(Visitor*) { }
+};
+
+// Has a non-trivial dtor (user-declared).
+class B {
+public:
+ ~B() { }
+ void trace(Visitor*) { };
+};
+
+// Has a trivial dtor.
+class C {
+public:
+ void trace(Visitor*) { };
+};
+
+} // blink namespace
+
+namespace WTF {
+
+template<>
+struct VectorTraits<blink::C> {
+ static const bool needsDestruction = false;
+};
+
+} // WTF namespace
+
+namespace blink {
+
+// Off-heap vectors always need to be finalized.
+class NeedsFinalizer : public A, public ScriptWrappable {
+public:
+ void trace(Visitor*);
+private:
+ Vector<Member<A> > m_as;
+};
+
+// On-heap vectors with inlined objects that need destruction
+// need to be finalized.
+class AlsoNeedsFinalizer : public A {
+public:
+ void trace(Visitor*);
+private:
+ HeapVector<B, 10> m_bs;
+};
+
+// On-heap vectors with no inlined objects never need to be finalized.
+class DoesNotNeedFinalizer : public A, public ScriptWrappable {
+public:
+ void trace(Visitor*);
+private:
+ HeapVector<B> m_bs;
+};
+
+// On-heap vectors with inlined objects that don't need destruction
+// don't need to be finalized.
+class AlsoDoesNotNeedFinalizer : public A, public ScriptWrappable {
+public:
+ void trace(Visitor*);
+private:
+ HeapVector<Member<A>, 10> m_as;
+ HeapVector<C, 10> m_cs;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.txt b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.txt
new file mode 100644
index 0000000..54f0fff
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_field.txt
@@ -0,0 +1,14 @@
+In file included from class_requires_finalization_field.cpp:5:
+./class_requires_finalization_field.h:44:1: warning: [blink-gc] Class 'NeedsFinalizer' requires finalization.
+class NeedsFinalizer : public A, public ScriptWrappable {
+^
+./class_requires_finalization_field.h:48:5: note: [blink-gc] Field 'm_as' requiring finalization declared here:
+ Vector<Member<A> > m_as;
+ ^
+./class_requires_finalization_field.h:53:1: warning: [blink-gc] Class 'AlsoNeedsFinalizer' requires finalization.
+class AlsoNeedsFinalizer : public A {
+^
+./class_requires_finalization_field.h:57:5: note: [blink-gc] Field 'm_bs' requiring finalization declared here:
+ HeapVector<B, 10> m_bs;
+ ^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.cpp b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.cpp
new file mode 100644
index 0000000..782810e
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.cpp
@@ -0,0 +1,37 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "class_requires_finalization_mixin.h"
+
+namespace blink {
+
+void MixinFinalizable::trace(Visitor* visitor)
+{
+ visitor->trace(m_onHeap);
+}
+
+void MixinNotFinalizable::trace(Visitor* visitor)
+{
+ visitor->trace(m_onHeap);
+}
+
+void NeedsFinalizer::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+ MixinFinalizable::trace(visitor);
+}
+
+void HasFinalizer::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+ MixinFinalizable::trace(visitor);
+}
+
+void NeedsNoFinalization::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+ MixinNotFinalizable::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.h b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.h
new file mode 100644
index 0000000..d56a57e
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.h
@@ -0,0 +1,62 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CLASS_REQUIRES_FINALIZATION_MIXIN_H_
+#define CLASS_REQUIRES_FINALIZATION_MIXIN_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class OffHeap : public RefCounted<OffHeap> { };
+class OnHeap : public GarbageCollected<OnHeap> { };
+
+class MixinFinalizable : public GarbageCollectedMixin {
+public:
+ void trace(Visitor*);
+private:
+ RefPtr<OffHeap> m_offHeap; // Requires finalization
+ Member<OnHeap> m_onHeap;
+};
+
+class MixinNotFinalizable : public GarbageCollectedMixin {
+public:
+ void trace(Visitor*);
+private:
+ Member<OnHeap> m_onHeap;
+};
+
+class NeedsFinalizer
+ : public GarbageCollected<NeedsFinalizer>
+ , public MixinFinalizable {
+ USING_GARBAGE_COLLECTED_MIXIN(NeedsFinalizer);
+public:
+ void trace(Visitor*);
+private:
+ Member<OnHeap> m_obj;
+};
+
+class HasFinalizer : public GarbageCollectedFinalized<HasFinalizer>,
+ public MixinFinalizable {
+ USING_GARBAGE_COLLECTED_MIXIN(HasFinalizer);
+public:
+ void trace(Visitor*);
+private:
+ Member<OnHeap> m_obj;
+};
+
+class NeedsNoFinalization
+ : public GarbageCollected<NeedsNoFinalization>
+ , public MixinNotFinalizable
+ , public ScriptWrappable {
+ USING_GARBAGE_COLLECTED_MIXIN(NeedsNoFinalization);
+public:
+ void trace(Visitor*);
+private:
+ Member<OnHeap> m_obj;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.txt b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.txt
new file mode 100644
index 0000000..0bf93d5
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_finalization_mixin.txt
@@ -0,0 +1,8 @@
+In file included from class_requires_finalization_mixin.cpp:5:
+./class_requires_finalization_mixin.h:30:1: warning: [blink-gc] Class 'NeedsFinalizer' requires finalization.
+class NeedsFinalizer
+^
+./class_requires_finalization_mixin.h:32:7: note: [blink-gc] Base class 'MixinFinalizable' requiring finalization declared here:
+ , public MixinFinalizable {
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.cpp b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.cpp
new file mode 100644
index 0000000..866b351
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.cpp
@@ -0,0 +1,7 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "class_requires_trace_method.h"
+
+// Nothing to define.
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.h b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.h
new file mode 100644
index 0000000..82cad15
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.h
@@ -0,0 +1,27 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CLASS_REQUIRES_TRACE_METHOD_H_
+#define CLASS_REQUIRES_TRACE_METHOD_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject;
+
+class PartObject {
+ DISALLOW_ALLOCATION();
+private:
+ Member<HeapObject> m_obj;
+};
+
+class HeapObject : public GarbageCollected<HeapObject> {
+private:
+ PartObject m_part;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.txt b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.txt
new file mode 100644
index 0000000..de6fd94
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method.txt
@@ -0,0 +1,14 @@
+In file included from class_requires_trace_method.cpp:5:
+./class_requires_trace_method.h:14:1: warning: [blink-gc] Class 'PartObject' requires a trace method.
+class PartObject {
+^
+./class_requires_trace_method.h:17:5: note: [blink-gc] Untraced field 'm_obj' declared here:
+ Member<HeapObject> m_obj;
+ ^
+./class_requires_trace_method.h:20:1: warning: [blink-gc] Class 'HeapObject' requires a trace method.
+class HeapObject : public GarbageCollected<HeapObject> {
+^
+./class_requires_trace_method.h:22:5: note: [blink-gc] Untraced field 'm_part' declared here:
+ PartObject m_part;
+ ^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.cpp b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.cpp
new file mode 100644
index 0000000..7051fb2
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.cpp
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "class_requires_trace_method_tmpl.h"
+
+namespace blink {
+
+// Does not need a trace method.
+class NoTrace : public TemplatedObject<PartObjectA> { };
+
+// Needs a trace method.
+class NeedsTrace : public TemplatedObject<PartObjectB> { };
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.h b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.h
new file mode 100644
index 0000000..b24b1dd
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.h
@@ -0,0 +1,34 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CLASS_REQUIRES_TRACE_METHOD_TMPL_H_
+#define CLASS_REQUIRES_TRACE_METHOD_TMPL_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> { };
+
+class PartObjectA {
+ DISALLOW_ALLOCATION();
+};
+
+class PartObjectB {
+ DISALLOW_ALLOCATION();
+public:
+ void trace(Visitor* visitor) { visitor->trace(m_obj); }
+private:
+ Member<HeapObject> m_obj;
+};
+
+template<typename T>
+class TemplatedObject {
+private:
+ T m_part;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.txt b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.txt
new file mode 100644
index 0000000..49705b9
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/class_requires_trace_method_tmpl.txt
@@ -0,0 +1,8 @@
+In file included from class_requires_trace_method_tmpl.cpp:5:
+./class_requires_trace_method_tmpl.h:27:1: warning: [blink-gc] Class 'TemplatedObject<blink::PartObjectB>' requires a trace method.
+class TemplatedObject {
+^
+./class_requires_trace_method_tmpl.h:29:5: note: [blink-gc] Untraced field 'm_part' declared here:
+ T m_part;
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_ptrs.cpp b/tools/clang/blink_gc_plugin/tests/cycle_ptrs.cpp
new file mode 100644
index 0000000..f3b3989
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_ptrs.cpp
@@ -0,0 +1,17 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "cycle_ptrs.h"
+
+namespace blink {
+
+void A::trace(Visitor* visitor) {
+ visitor->trace(m_b);
+}
+
+void B::trace(Visitor* visitor) {
+ visitor->trace(m_a);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_ptrs.flags b/tools/clang/blink_gc_plugin/tests/cycle_ptrs.flags
new file mode 100644
index 0000000..a55c2f0
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_ptrs.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-blink-gc-plugin -Xclang dump-graph
\ No newline at end of file
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_ptrs.h b/tools/clang/blink_gc_plugin/tests/cycle_ptrs.h
new file mode 100644
index 0000000..8c07a06
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_ptrs.h
@@ -0,0 +1,54 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CYCLE_PTRS_H_
+#define CYCLE_PTRS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class B;
+class C;
+class D;
+class E;
+
+// This contains a leaking cycle:
+// E -per-> A -mem-> B -ref-> C -own-> D -own-vec-> E
+
+// The traced cycle from A -> B -> A does not leak.
+
+class A : public GarbageCollected<A> {
+public:
+ virtual void trace(Visitor*);
+private:
+ Member<B> m_b;
+};
+
+class B : public GarbageCollectedFinalized<B> {
+public:
+ virtual void trace(Visitor*);
+private:
+ Member<A> m_a;
+ RefPtr<C> m_c;
+};
+
+class C : public RefCounted<C> {
+private:
+ OwnPtr<D> m_d;
+};
+
+class D {
+private:
+ Vector<OwnPtr<E> > m_es;
+};
+
+class E {
+private:
+ Persistent<A> m_a;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_ptrs.txt b/tools/clang/blink_gc_plugin/tests/cycle_ptrs.txt
new file mode 100644
index 0000000..7ad6c48
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_ptrs.txt
@@ -0,0 +1,7 @@
+
+Found a potentially leaking cycle starting from a GC root:
+./cycle_ptrs.h:49:5: blink::E (m_a) => blink::A
+./cycle_ptrs.h:26:5: blink::A (m_b) => blink::B
+./cycle_ptrs.h:34:5: blink::B (m_c) => blink::C
+./cycle_ptrs.h:39:5: blink::C (m_d) => blink::D
+./cycle_ptrs.h:44:5: blink::D (m_es) => blink::E
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_sub.cpp b/tools/clang/blink_gc_plugin/tests/cycle_sub.cpp
new file mode 100644
index 0000000..dfe835a
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_sub.cpp
@@ -0,0 +1,14 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "cycle_sub.h"
+
+namespace blink {
+
+void B::trace(Visitor* visitor) {
+ visitor->trace(m_c);
+ A::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_sub.flags b/tools/clang/blink_gc_plugin/tests/cycle_sub.flags
new file mode 100644
index 0000000..a55c2f0
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_sub.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-blink-gc-plugin -Xclang dump-graph
\ No newline at end of file
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_sub.h b/tools/clang/blink_gc_plugin/tests/cycle_sub.h
new file mode 100644
index 0000000..a007061
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_sub.h
@@ -0,0 +1,36 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CYCLE_SUB_H_
+#define CYCLE_SUB_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class C;
+
+// This contains a leaking cycle:
+// C -per-> A -sub-> B -ref-> C
+
+class A : public GarbageCollectedFinalized<A> {
+public:
+ virtual void trace(Visitor*) {}
+};
+
+class B : public A {
+public:
+ virtual void trace(Visitor*);
+private:
+ RefPtr<C> m_c;
+};
+
+class C : public RefCounted<C> {
+private:
+ Persistent<A> m_a;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_sub.txt b/tools/clang/blink_gc_plugin/tests/cycle_sub.txt
new file mode 100644
index 0000000..9c015c8
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_sub.txt
@@ -0,0 +1,5 @@
+
+Found a potentially leaking cycle starting from a GC root:
+./cycle_sub.h:31:5: blink::C (m_a) => blink::A
+./cycle_sub.h:22:11: blink::A (<subclass>) => blink::B
+./cycle_sub.h:26:5: blink::B (m_c) => blink::C
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_super.cpp b/tools/clang/blink_gc_plugin/tests/cycle_super.cpp
new file mode 100644
index 0000000..d9ecd79
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_super.cpp
@@ -0,0 +1,21 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "cycle_super.h"
+
+namespace blink {
+
+void A::trace(Visitor* visitor) {
+ visitor->trace(m_d);
+}
+
+void B::trace(Visitor* visitor) {
+ A::trace(visitor);
+}
+
+void C::trace(Visitor* visitor) {
+ B::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_super.flags b/tools/clang/blink_gc_plugin/tests/cycle_super.flags
new file mode 100644
index 0000000..a55c2f0
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_super.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-blink-gc-plugin -Xclang dump-graph
\ No newline at end of file
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_super.h b/tools/clang/blink_gc_plugin/tests/cycle_super.h
new file mode 100644
index 0000000..13b05c1
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_super.h
@@ -0,0 +1,41 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CYCLE_SUPER_H_
+#define CYCLE_SUPER_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class D;
+
+// This contains a leaking cycle:
+// D -per-> C -sup-> B -sup-> A -ref-> D
+
+class A : public GarbageCollectedFinalized<A> {
+public:
+ virtual void trace(Visitor*);
+private:
+ RefPtr<D> m_d;
+};
+
+class B : public A {
+public:
+ virtual void trace(Visitor*);
+};
+
+class C : public B {
+public:
+ virtual void trace(Visitor*);
+};
+
+class D : public RefCounted<C> {
+private:
+ Persistent<C> m_c;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_super.txt b/tools/clang/blink_gc_plugin/tests/cycle_super.txt
new file mode 100644
index 0000000..374074b
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_super.txt
@@ -0,0 +1,4 @@
+
+Found a potentially leaking cycle starting from a GC root:
+./cycle_super.h:36:5: blink::D (m_c) => blink::C
+./cycle_super.h:21:5: blink::C (blink::B <: blink::A <: m_d) => blink::D
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_super_neg.cpp b/tools/clang/blink_gc_plugin/tests/cycle_super_neg.cpp
new file mode 100644
index 0000000..33dec59
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_super_neg.cpp
@@ -0,0 +1,18 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "cycle_super_neg.h"
+
+namespace blink {
+
+void B::trace(Visitor* visitor) {
+ A::trace(visitor);
+}
+
+void D::trace(Visitor* visitor) {
+ visitor->trace(m_c);
+ A::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_super_neg.flags b/tools/clang/blink_gc_plugin/tests/cycle_super_neg.flags
new file mode 100644
index 0000000..a55c2f0
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_super_neg.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-blink-gc-plugin -Xclang dump-graph
\ No newline at end of file
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_super_neg.h b/tools/clang/blink_gc_plugin/tests/cycle_super_neg.h
new file mode 100644
index 0000000..6f99eff
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_super_neg.h
@@ -0,0 +1,44 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CYCLE_SUPER_NEG_H_
+#define CYCLE_SUPER_NEG_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class C;
+
+// The chain:
+// C -per-> B -sup-> A -sub-> D -ref-> C
+// is not a leaking cycle, because the super-class relationship
+// should not transitively imply sub-class relationships.
+// I.e. B -/-> D
+
+class A : public GarbageCollectedFinalized<A> {
+public:
+ virtual void trace(Visitor*) {}
+};
+
+class B : public A {
+public:
+ virtual void trace(Visitor*);
+};
+
+class C : public RefCounted<C> {
+private:
+ Persistent<B> m_b;
+};
+
+class D : public A {
+public:
+ virtual void trace(Visitor*);
+private:
+ RefPtr<C> m_c;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/cycle_super_neg.txt b/tools/clang/blink_gc_plugin/tests/cycle_super_neg.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/cycle_super_neg.txt
@@ -0,0 +1 @@
+
diff --git a/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.cpp b/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.cpp
new file mode 100644
index 0000000..79cbce7
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.cpp
@@ -0,0 +1,27 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "destructor_access_finalized_field.h"
+
+namespace blink {
+
+HeapObject::~HeapObject()
+{
+ // Valid access to fields.
+ if (m_ref->foo() && !m_obj)
+ m_objs.size();
+
+ // Invalid access to fields.
+ bar(m_obj);
+ m_obj->foo();
+ m_objs[0];
+}
+
+void HeapObject::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+ visitor->trace(m_objs);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.h b/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.h
new file mode 100644
index 0000000..232eb08
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.h
@@ -0,0 +1,31 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef DESTRUCTOR_ACCESS_FINALIZED_FIELD_H_
+#define DESTRUCTOR_ACCESS_FINALIZED_FIELD_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class Other : public RefCounted<Other> {
+public:
+ bool foo() { return true; }
+};
+
+class HeapObject : public GarbageCollectedFinalized<HeapObject> {
+public:
+ ~HeapObject();
+ void trace(Visitor*);
+ bool foo() { return true; }
+ void bar(HeapObject*) { }
+private:
+ RefPtr<Other> m_ref;
+ Member<HeapObject> m_obj;
+ Vector<Member<HeapObject> > m_objs;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.txt b/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.txt
new file mode 100644
index 0000000..0746b01
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/destructor_access_finalized_field.txt
@@ -0,0 +1,19 @@
+destructor_access_finalized_field.cpp:16:9: warning: [blink-gc] Finalizer '~HeapObject' accesses potentially finalized field 'm_obj'.
+ bar(m_obj);
+ ^
+./destructor_access_finalized_field.h:25:5: note: [blink-gc] Potentially finalized field 'm_obj' declared here:
+ Member<HeapObject> m_obj;
+ ^
+destructor_access_finalized_field.cpp:17:5: warning: [blink-gc] Finalizer '~HeapObject' accesses potentially finalized field 'm_obj'.
+ m_obj->foo();
+ ^
+./destructor_access_finalized_field.h:25:5: note: [blink-gc] Potentially finalized field 'm_obj' declared here:
+ Member<HeapObject> m_obj;
+ ^
+destructor_access_finalized_field.cpp:18:5: warning: [blink-gc] Finalizer '~HeapObject' accesses potentially finalized field 'm_objs'.
+ m_objs[0];
+ ^
+./destructor_access_finalized_field.h:26:5: note: [blink-gc] Potentially finalized field 'm_objs' declared here:
+ Vector<Member<HeapObject> > m_objs;
+ ^
+3 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.cpp b/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.cpp
new file mode 100644
index 0000000..8efc41d
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.cpp
@@ -0,0 +1,20 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "destructor_in_nonfinalized_class.h"
+
+namespace blink {
+
+HeapObject::~HeapObject()
+{
+ // Do something when destructed...
+ (void)this;
+}
+
+void HeapObject::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.h b/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.h
new file mode 100644
index 0000000..f3fa506
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.h
@@ -0,0 +1,22 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef DESTRUCTOR_IN_NONFINALIZED_CLASS_H_
+#define DESTRUCTOR_IN_NONFINALIZED_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ ~HeapObject();
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.txt b/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.txt
new file mode 100644
index 0000000..cf19ea1
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/destructor_in_nonfinalized_class.txt
@@ -0,0 +1,8 @@
+In file included from destructor_in_nonfinalized_class.cpp:5:
+./destructor_in_nonfinalized_class.h:12:1: warning: [blink-gc] Class 'HeapObject' requires finalization.
+class HeapObject : public GarbageCollected<HeapObject> {
+^
+destructor_in_nonfinalized_class.cpp:9:1: note: [blink-gc] User-declared destructor declared here:
+HeapObject::~HeapObject()
+^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/fields_require_tracing.cpp b/tools/clang/blink_gc_plugin/tests/fields_require_tracing.cpp
new file mode 100644
index 0000000..9779510
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/fields_require_tracing.cpp
@@ -0,0 +1,20 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "fields_require_tracing.h"
+
+namespace blink {
+
+void PartObject::trace(Visitor* visitor) {
+ m_obj1->trace(visitor); // Don't allow direct tracing.
+ visitor->trace(m_obj2);
+ // Missing visitor->trace(m_obj3);
+}
+
+void HeapObject::trace(Visitor* visitor) {
+ // Missing visitor->trace(m_part);
+ visitor->trace(m_obj);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/fields_require_tracing.h b/tools/clang/blink_gc_plugin/tests/fields_require_tracing.h
new file mode 100644
index 0000000..fe5b8d9
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/fields_require_tracing.h
@@ -0,0 +1,34 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FIELDS_REQUIRE_TRACING_H_
+#define FIELDS_REQUIRE_TRACING_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject;
+
+class PartObject {
+ DISALLOW_ALLOCATION();
+public:
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj1;
+ Member<HeapObject> m_obj2;
+ Member<HeapObject> m_obj3;
+};
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ void trace(Visitor*);
+private:
+ PartObject m_part;
+ Member<HeapObject> m_obj;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/fields_require_tracing.txt b/tools/clang/blink_gc_plugin/tests/fields_require_tracing.txt
new file mode 100644
index 0000000..cafa32f
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/fields_require_tracing.txt
@@ -0,0 +1,16 @@
+fields_require_tracing.cpp:9:1: warning: [blink-gc] Class 'PartObject' has untraced fields that require tracing.
+void PartObject::trace(Visitor* visitor) {
+^
+./fields_require_tracing.h:19:5: note: [blink-gc] Untraced field 'm_obj1' declared here:
+ Member<HeapObject> m_obj1;
+ ^
+./fields_require_tracing.h:21:5: note: [blink-gc] Untraced field 'm_obj3' declared here:
+ Member<HeapObject> m_obj3;
+ ^
+fields_require_tracing.cpp:15:1: warning: [blink-gc] Class 'HeapObject' has untraced fields that require tracing.
+void HeapObject::trace(Visitor* visitor) {
+^
+./fields_require_tracing.h:28:5: note: [blink-gc] Untraced field 'm_part' declared here:
+ PartObject m_part;
+ ^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.cpp b/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.cpp
new file mode 100644
index 0000000..91244d1
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.cpp
@@ -0,0 +1,63 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "finalize_after_dispatch.h"
+
+namespace blink {
+
+static B* toB(A* a) { return static_cast<B*>(a); }
+
+void A::trace(Visitor* visitor)
+{
+ switch (m_type) {
+ case TB:
+ toB(this)->traceAfterDispatch(visitor);
+ break;
+ case TC:
+ static_cast<C*>(this)->traceAfterDispatch(visitor);
+ break;
+ case TD:
+ static_cast<D*>(this)->traceAfterDispatch(visitor);
+ break;
+ }
+}
+
+void A::traceAfterDispatch(Visitor* visitor)
+{
+}
+
+void A::finalizeGarbageCollectedObject()
+{
+ switch (m_type) {
+ case TB:
+ toB(this)->~B();
+ break;
+ case TC:
+ static_cast<C*>(this)->~C();
+ break;
+ case TD:
+ // Missing static_cast<D*>(this)->~D();
+ break;
+ }
+}
+
+void B::traceAfterDispatch(Visitor* visitor)
+{
+ visitor->trace(m_a);
+ A::traceAfterDispatch(visitor);
+}
+
+void C::traceAfterDispatch(Visitor* visitor)
+{
+ visitor->trace(m_a);
+ A::traceAfterDispatch(visitor);
+}
+
+void D::traceAfterDispatch(Visitor* visitor)
+{
+ visitor->trace(m_a);
+ Abstract::traceAfterDispatch(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.h b/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.h
new file mode 100644
index 0000000..acd16ec
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.h
@@ -0,0 +1,78 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef FINALIZE_AFTER_DISPATCH_H_
+#define FINALIZE_AFTER_DISPATCH_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class NeedsFinalize : public GarbageCollectedFinalized<NeedsFinalize> {
+public:
+ void trace(Visitor*);
+ void traceAfterDispatch(Visitor*);
+ // Needs a finalizeGarbageCollectedObject method.
+};
+
+class NeedsDispatch : public GarbageCollectedFinalized<NeedsDispatch> {
+public:
+ void trace(Visitor*);
+ // Needs a traceAfterDispatch method.
+ void finalizeGarbageCollectedObject() { };
+};
+
+class NeedsFinalizedBase : public GarbageCollected<NeedsFinalizedBase> {
+public:
+ void trace(Visitor*) { };
+ void traceAfterDispatch(Visitor*) { };
+ void finalizeGarbageCollectedObject() { };
+};
+
+class A : GarbageCollectedFinalized<A> {
+public:
+ void trace(Visitor*);
+ void traceAfterDispatch(Visitor*);
+ void finalizeGarbageCollectedObject();
+protected:
+ enum Type { TB, TC, TD };
+ A(Type type) : m_type(type) { }
+private:
+ Type m_type;
+};
+
+class B : public A {
+public:
+ B() : A(TB) { }
+ ~B() { }
+ void traceAfterDispatch(Visitor*);
+private:
+ Member<A> m_a;
+};
+
+class C : public A {
+public:
+ C() : A(TC) { }
+ void traceAfterDispatch(Visitor*);
+private:
+ Member<A> m_a;
+};
+
+// This class is considered abstract and does not need to be dispatched to.
+class Abstract : public A {
+protected:
+ Abstract(Type type) : A(type) { }
+};
+
+class D : public Abstract {
+public:
+ D() : Abstract(TD) { }
+ void traceAfterDispatch(Visitor*);
+private:
+ Member<A> m_a;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.txt b/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.txt
new file mode 100644
index 0000000..8a652a4
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/finalize_after_dispatch.txt
@@ -0,0 +1,17 @@
+In file included from finalize_after_dispatch.cpp:5:
+./finalize_after_dispatch.h:12:1: warning: [blink-gc] Class 'NeedsFinalize' is missing manual finalize dispatch.
+class NeedsFinalize : public GarbageCollectedFinalized<NeedsFinalize> {
+^
+./finalize_after_dispatch.h:19:1: warning: [blink-gc] Class 'NeedsDispatch' is missing manual trace dispatch.
+class NeedsDispatch : public GarbageCollectedFinalized<NeedsDispatch> {
+^
+./finalize_after_dispatch.h:26:1: warning: [blink-gc] Class 'NeedsFinalizedBase' requires finalization.
+class NeedsFinalizedBase : public GarbageCollected<NeedsFinalizedBase> {
+^
+./finalize_after_dispatch.h:30:5: note: [blink-gc] User-declared finalizer declared here:
+ void finalizeGarbageCollectedObject() { };
+ ^
+finalize_after_dispatch.cpp:30:1: warning: [blink-gc] Missing dispatch to class 'D' in manual finalize dispatch.
+void A::finalizeGarbageCollectedObject()
+^
+4 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.cpp b/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.cpp
new file mode 100644
index 0000000..e8f42f2
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.cpp
@@ -0,0 +1,20 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "garbage_collected_mixin.h"
+
+namespace blink {
+
+void Mixin::trace(Visitor* visitor)
+{
+ // Missing: visitor->trace(m_self);
+}
+
+void HeapObject::trace(Visitor* visitor)
+{
+ visitor->trace(m_mix);
+ // Missing: Mixin::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.h b/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.h
new file mode 100644
index 0000000..55449cb
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.h
@@ -0,0 +1,29 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef GARBAGE_COLLECTED_MIXIN_H_
+#define GARBAGE_COLLECTED_MIXIN_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class Mixin : public GarbageCollectedMixin {
+public:
+ void trace(Visitor*);
+private:
+ Member<Mixin> m_self;
+};
+
+class HeapObject : public GarbageCollected<HeapObject>, public Mixin {
+ USING_GARBAGE_COLLECTED_MIXIN(HeapObject);
+public:
+ void trace(Visitor*);
+private:
+ Member<Mixin> m_mix;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.txt b/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.txt
new file mode 100644
index 0000000..4051a6a
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/garbage_collected_mixin.txt
@@ -0,0 +1,10 @@
+garbage_collected_mixin.cpp:9:1: warning: [blink-gc] Class 'Mixin' has untraced fields that require tracing.
+void Mixin::trace(Visitor* visitor)
+^
+./garbage_collected_mixin.h:16:5: note: [blink-gc] Untraced field 'm_self' declared here:
+ Member<Mixin> m_self;
+ ^
+garbage_collected_mixin.cpp:14:1: warning: [blink-gc] Base class 'Mixin' of derived class 'HeapObject' requires tracing.
+void HeapObject::trace(Visitor* visitor)
+^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/heap/stubs.h b/tools/clang/blink_gc_plugin/tests/heap/stubs.h
new file mode 100644
index 0000000..7d646cd
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/heap/stubs.h
@@ -0,0 +1,247 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef HEAP_STUBS_H_
+#define HEAP_STUBS_H_
+
+#include "stddef.h"
+
+#define WTF_MAKE_FAST_ALLOCATED \
+ public: \
+ void* operator new(size_t, void* p); \
+ void* operator new[](size_t, void* p); \
+ void* operator new(size_t size); \
+ private: \
+ typedef int __thisIsHereToForceASemicolonAfterThisMacro
+
+namespace WTF {
+
+template<typename T> class RefCounted { };
+
+template<typename T> class RawPtr {
+public:
+ operator T*() const { return 0; }
+ T* operator->() { return 0; }
+};
+
+template<typename T> class RefPtr {
+public:
+ ~RefPtr() { }
+ operator T*() const { return 0; }
+ T* operator->() { return 0; }
+};
+
+template<typename T> class OwnPtr {
+public:
+ ~OwnPtr() { }
+ operator T*() const { return 0; }
+ T* operator->() { return 0; }
+};
+
+class DefaultAllocator {
+public:
+ static const bool isGarbageCollected = false;
+};
+
+template<typename T>
+struct VectorTraits {
+ static const bool needsDestruction = true;
+};
+
+template<size_t inlineCapacity, bool isGarbageCollected, bool tNeedsDestruction>
+class VectorDestructorBase {
+public:
+ ~VectorDestructorBase() {}
+};
+
+template<size_t inlineCapacity>
+class VectorDestructorBase<inlineCapacity, true, false> {};
+
+template<>
+class VectorDestructorBase<0, true, true> {};
+
+template<
+ typename T,
+ size_t inlineCapacity = 0,
+ typename Allocator = DefaultAllocator>
+class Vector : public VectorDestructorBase<inlineCapacity,
+ Allocator::isGarbageCollected,
+ VectorTraits<T>::needsDestruction> {
+public:
+ size_t size();
+ T& operator[](size_t);
+};
+
+template<
+ typename T,
+ size_t inlineCapacity = 0,
+ typename Allocator = DefaultAllocator>
+class Deque {};
+
+template<
+ typename ValueArg,
+ typename HashArg = void,
+ typename TraitsArg = void,
+ typename Allocator = DefaultAllocator>
+class HashSet {};
+
+template<
+ typename ValueArg,
+ typename HashArg = void,
+ typename TraitsArg = void,
+ typename Allocator = DefaultAllocator>
+class ListHashSet {};
+
+template<
+ typename ValueArg,
+ typename HashArg = void,
+ typename TraitsArg = void,
+ typename Allocator = DefaultAllocator>
+class LinkedHashSet {};
+
+template<
+ typename ValueArg,
+ typename HashArg = void,
+ typename TraitsArg = void,
+ typename Allocator = DefaultAllocator>
+class HashCountedSet {};
+
+template<
+ typename KeyArg,
+ typename MappedArg,
+ typename HashArg = void,
+ typename KeyTraitsArg = void,
+ typename MappedTraitsArg = void,
+ typename Allocator = DefaultAllocator>
+class HashMap {};
+
+}
+
+namespace blink {
+
+using namespace WTF;
+
+#define DISALLOW_ALLOCATION() \
+ private: \
+ void* operator new(size_t) = delete; \
+ void* operator new(size_t, void*) = delete;
+
+#define STACK_ALLOCATED() \
+ private: \
+ __attribute__((annotate("blink_stack_allocated"))) \
+ void* operator new(size_t) = delete; \
+ void* operator new(size_t, void*) = delete;
+
+#define ALLOW_ONLY_INLINE_ALLOCATION() \
+ public: \
+ void* operator new(size_t, void*); \
+ private: \
+ void* operator new(size_t) = delete;
+
+#define GC_PLUGIN_IGNORE(bug) \
+ __attribute__((annotate("blink_gc_plugin_ignore")))
+
+#define USING_GARBAGE_COLLECTED_MIXIN(type) \
+ public: \
+ virtual void adjustAndMark(Visitor*) const {} \
+ virtual bool isAlive(Visitor*) const { return 0; }
+
+template<typename T> class GarbageCollected { };
+
+template<typename T>
+class GarbageCollectedFinalized : public GarbageCollected<T> { };
+
+template<typename T> class Member {
+public:
+ operator T*() const { return 0; }
+ T* operator->() { return 0; }
+ bool operator!() const { return false; }
+};
+
+template<typename T> class WeakMember {
+public:
+ operator T*() const { return 0; }
+ T* operator->() { return 0; }
+ bool operator!() const { return false; }
+};
+
+template<typename T> class Persistent {
+public:
+ operator T*() const { return 0; }
+ T* operator->() { return 0; }
+ bool operator!() const { return false; }
+};
+
+class HeapAllocator {
+public:
+ static const bool isGarbageCollected = true;
+};
+
+template<typename T, size_t inlineCapacity = 0>
+class HeapVector : public Vector<T, inlineCapacity, HeapAllocator> { };
+
+template<typename T, size_t inlineCapacity = 0>
+class HeapDeque : public Vector<T, inlineCapacity, HeapAllocator> { };
+
+template<typename T>
+class HeapHashSet : public HashSet<T, void, void, HeapAllocator> { };
+
+template<typename T>
+class HeapListHashSet : public ListHashSet<T, void, void, HeapAllocator> { };
+
+template<typename T>
+class HeapLinkedHashSet : public LinkedHashSet<T, void, void, HeapAllocator> {
+};
+
+template<typename T>
+class HeapHashCountedSet : public HashCountedSet<T, void, void, HeapAllocator> {
+};
+
+template<typename K, typename V>
+class HeapHashMap : public HashMap<K, V, void, void, void, HeapAllocator> { };
+
+template<typename T>
+class PersistentHeapVector : public Vector<T, 0, HeapAllocator> { };
+
+class Visitor {
+public:
+ template<typename T>
+ void trace(const T&);
+
+ template<typename T, void (T::*method)(Visitor*)>
+ void registerWeakMembers(const T* obj);
+};
+
+class GarbageCollectedMixin {
+ virtual void adjustAndMark(Visitor*) const = 0;
+ virtual bool isAlive(Visitor*) const = 0;
+ virtual void trace(Visitor*) { }
+};
+
+template<typename T>
+struct TraceIfNeeded {
+ static void trace(Visitor*, T*);
+};
+
+// blink::ScriptWrappable receives special treatment
+// so that it can be used together with GarbageCollected<T>
+// even though it provides a user-declared destructor,
+// as is the case when Oilpan is disabled.
+class ScriptWrappable {
+public:
+ ~ScriptWrappable() { /* user-declared, thus, non-trivial */ }
+};
+
+}
+
+namespace WTF {
+
+template<typename T>
+struct VectorTraits<blink::Member<T> > {
+ static const bool needsDestruction = false;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/ignore_class.cpp b/tools/clang/blink_gc_plugin/tests/ignore_class.cpp
new file mode 100644
index 0000000..c539eb6
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ignore_class.cpp
@@ -0,0 +1,20 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "ignore_class.h"
+
+namespace blink {
+
+void B::trace(Visitor* visitor)
+{
+ // Class is ignored so no checking here.
+}
+
+void C::trace(Visitor* visitor)
+{
+ // Missing trace of m_obj.
+ // Ignored base class B does not need tracing.
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/ignore_class.h b/tools/clang/blink_gc_plugin/tests/ignore_class.h
new file mode 100644
index 0000000..580ed7c
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ignore_class.h
@@ -0,0 +1,40 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef IGNORE_CLASS_H_
+#define IGNORE_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> { };
+
+// Don't require trace method on ignored class.
+class GC_PLUGIN_IGNORE("http://crbug.com/12345") A;
+class A : public GarbageCollected<A> {
+private:
+ Member<HeapObject> m_obj;
+};
+
+// Don't require tracing of fields on ignored class.
+class GC_PLUGIN_IGNORE("http://crbug.com/12345") B;
+class B : public GarbageCollected<B> {
+public:
+ virtual void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj;
+};
+
+// Don't require tracing of an ignored base class.
+class C : public B {
+public:
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/ignore_class.txt b/tools/clang/blink_gc_plugin/tests/ignore_class.txt
new file mode 100644
index 0000000..d3d2d80
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ignore_class.txt
@@ -0,0 +1,7 @@
+ignore_class.cpp:14:1: warning: [blink-gc] Class 'C' has untraced fields that require tracing.
+void C::trace(Visitor* visitor)
+^
+./ignore_class.h:35:5: note: [blink-gc] Untraced field 'm_obj' declared here:
+ Member<HeapObject> m_obj;
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/ignore_fields.cpp b/tools/clang/blink_gc_plugin/tests/ignore_fields.cpp
new file mode 100644
index 0000000..118af75
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ignore_fields.cpp
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "ignore_fields.h"
+
+namespace blink {
+
+void C::trace(Visitor* visitor)
+{
+ // Missing trace of m_one.
+ // Not missing ignored field m_two.
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/ignore_fields.h b/tools/clang/blink_gc_plugin/tests/ignore_fields.h
new file mode 100644
index 0000000..e12bbab
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ignore_fields.h
@@ -0,0 +1,43 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef IGNORE_FIELDS_H_
+#define IGNORE_FIELDS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ virtual void trace(Visitor*) { }
+};
+
+// Don't warn about raw pointers to heap allocated objects.
+class A : public GarbageCollected<A>{
+private:
+ GC_PLUGIN_IGNORE("http://crbug.com/12345")
+ HeapObject* m_obj;
+};
+
+// Don't require trace method when (all) GC fields are ignored.
+class B : public GarbageCollected<B> {
+private:
+ GC_PLUGIN_IGNORE("http://crbug.com/12345")
+ Member<HeapObject> m_one;
+};
+
+// Don't require tracing an ignored field.
+class C : public GarbageCollected<C> {
+public:
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_one;
+ GC_PLUGIN_IGNORE("http://crbug.com/12345")
+ Member<HeapObject> m_two;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/ignore_fields.txt b/tools/clang/blink_gc_plugin/tests/ignore_fields.txt
new file mode 100644
index 0000000..b4de498
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ignore_fields.txt
@@ -0,0 +1,7 @@
+ignore_fields.cpp:9:1: warning: [blink-gc] Class 'C' has untraced fields that require tracing.
+void C::trace(Visitor* visitor)
+^
+./ignore_fields.h:36:5: note: [blink-gc] Untraced field 'm_one' declared here:
+ Member<HeapObject> m_one;
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/inner_class.cpp b/tools/clang/blink_gc_plugin/tests/inner_class.cpp
new file mode 100644
index 0000000..03a53ea
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/inner_class.cpp
@@ -0,0 +1,14 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "inner_class.h"
+
+namespace blink {
+
+void SomeObject::InnerObject::trace(Visitor* visitor)
+{
+ // Missing: visitor->trace(m_obj);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/inner_class.h b/tools/clang/blink_gc_plugin/tests/inner_class.h
new file mode 100644
index 0000000..30f6ce3
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/inner_class.h
@@ -0,0 +1,24 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INNER_CLASS_H_
+#define INNER_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class SomeObject {
+private:
+ class InnerObject : public GarbageCollected<InnerObject> {
+ public:
+ void trace(Visitor*);
+ private:
+ Member<InnerObject> m_obj;
+ };
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/inner_class.txt b/tools/clang/blink_gc_plugin/tests/inner_class.txt
new file mode 100644
index 0000000..acdef6e
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/inner_class.txt
@@ -0,0 +1,7 @@
+inner_class.cpp:9:1: warning: [blink-gc] Class 'InnerObject' has untraced fields that require tracing.
+void SomeObject::InnerObject::trace(Visitor* visitor)
+^
+./inner_class.h:18:9: note: [blink-gc] Untraced field 'm_obj' declared here:
+ Member<InnerObject> m_obj;
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/left_most_gc_base.cpp b/tools/clang/blink_gc_plugin/tests/left_most_gc_base.cpp
new file mode 100644
index 0000000..041d9f0
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/left_most_gc_base.cpp
@@ -0,0 +1,7 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "left_most_gc_base.h"
+
+// Nothing to define.
diff --git a/tools/clang/blink_gc_plugin/tests/left_most_gc_base.h b/tools/clang/blink_gc_plugin/tests/left_most_gc_base.h
new file mode 100644
index 0000000..f9bc6dd
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/left_most_gc_base.h
@@ -0,0 +1,23 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef LEFT_MOST_GC_BASE_H_
+#define LEFT_MOST_GC_BASE_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A { };
+class B { };
+
+class Right : public A, public B, public GarbageCollected<Right> { }; // Error
+class Left : public GarbageCollected<Left>, public B, public A { };
+
+class DerivedRight : public Right, public Left { }; // Error
+class DerivedLeft : public Left, public Right { };
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/left_most_gc_base.txt b/tools/clang/blink_gc_plugin/tests/left_most_gc_base.txt
new file mode 100644
index 0000000..4766ea6
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/left_most_gc_base.txt
@@ -0,0 +1,8 @@
+In file included from left_most_gc_base.cpp:5:
+./left_most_gc_base.h:15:1: warning: [blink-gc] Class 'Right' must derive its GC base in the left-most position.
+class Right : public A, public B, public GarbageCollected<Right> { }; // Error
+^
+./left_most_gc_base.h:18:1: warning: [blink-gc] Class 'DerivedRight' must derive its GC base in the left-most position.
+class DerivedRight : public Right, public Left { }; // Error
+^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.cpp b/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.cpp
new file mode 100644
index 0000000..4b44c2d
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.cpp
@@ -0,0 +1,24 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "member_in_offheap_class.h"
+
+namespace blink {
+
+void OffHeapObject::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+}
+
+void PartObject::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+}
+
+void InlineObject::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.h b/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.h
new file mode 100644
index 0000000..0dcacc5
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.h
@@ -0,0 +1,46 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEMBER_IN_OFFHEAP_CLASS_H_
+#define MEMBER_IN_OFFHEAP_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> { };
+
+class OffHeapObject {
+public:
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj; // Must not contain Member.
+ Persistent<HeapVector<Member<HeapObject> > > m_objs; // OK
+};
+
+class StackObject {
+ STACK_ALLOCATED();
+private:
+ Member<HeapObject> m_obj; // OK
+};
+
+class PartObject {
+ DISALLOW_ALLOCATION();
+public:
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj; // OK
+};
+
+class InlineObject {
+ ALLOW_ONLY_INLINE_ALLOCATION();
+public:
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj; // OK
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.txt b/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.txt
new file mode 100644
index 0000000..6aada91
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/member_in_offheap_class.txt
@@ -0,0 +1,8 @@
+In file included from member_in_offheap_class.cpp:5:
+./member_in_offheap_class.h:14:1: warning: [blink-gc] Class 'OffHeapObject' contains invalid fields.
+class OffHeapObject {
+^
+./member_in_offheap_class.h:18:5: note: [blink-gc] Member field 'm_obj' in unmanaged class declared here:
+ Member<HeapObject> m_obj; // Must not contain Member.
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/non_virtual_trace.cpp b/tools/clang/blink_gc_plugin/tests/non_virtual_trace.cpp
new file mode 100644
index 0000000..9f57711
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/non_virtual_trace.cpp
@@ -0,0 +1,23 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "non_virtual_trace.h"
+
+namespace blink {
+
+void A::trace(Visitor* visitor)
+{
+}
+
+void C::trace(Visitor* visitor)
+{
+ B::trace(visitor);
+}
+
+void D::trace(Visitor* visitor)
+{
+ B::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/non_virtual_trace.h b/tools/clang/blink_gc_plugin/tests/non_virtual_trace.h
new file mode 100644
index 0000000..4179d49
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/non_virtual_trace.h
@@ -0,0 +1,32 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef NON_VIRTUAL_TRACE_H_
+#define NON_VIRTUAL_TRACE_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A : public GarbageCollected<A> {
+public:
+ void trace(Visitor*);
+};
+
+class B : public A {
+};
+
+class C : public B {
+public:
+ void trace(Visitor*); // Cannot override a non-virtual trace.
+};
+
+class D : public B {
+public:
+ virtual void trace(Visitor*); // Cannot override a non-virtual trace.
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/non_virtual_trace.txt b/tools/clang/blink_gc_plugin/tests/non_virtual_trace.txt
new file mode 100644
index 0000000..a05a94d
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/non_virtual_trace.txt
@@ -0,0 +1,17 @@
+In file included from non_virtual_trace.cpp:5:
+./non_virtual_trace.h:12:1: warning: [blink-gc] Left-most base class 'A' of derived class 'D' must define a virtual trace method.
+class A : public GarbageCollected<A> {
+^
+non_virtual_trace.cpp:13:1: warning: [blink-gc] Class 'C' overrides non-virtual trace of base class 'A'.
+void C::trace(Visitor* visitor)
+^
+./non_virtual_trace.h:14:5: note: [blink-gc] Non-virtual trace method declared here:
+ void trace(Visitor*);
+ ^
+non_virtual_trace.cpp:18:1: warning: [blink-gc] Class 'D' overrides non-virtual trace of base class 'A'.
+void D::trace(Visitor* visitor)
+^
+./non_virtual_trace.h:14:5: note: [blink-gc] Non-virtual trace method declared here:
+ void trace(Visitor*);
+ ^
+3 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.cpp b/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.cpp
new file mode 100644
index 0000000..9e27c3d
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.cpp
@@ -0,0 +1,11 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "own_ptr_to_gc_managed_class.h"
+
+namespace blink {
+
+void HeapObject::trace(Visitor* visitor) { }
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.h b/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.h
new file mode 100644
index 0000000..b412561
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.h
@@ -0,0 +1,30 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef OWN_PTR_TO_GC_MANAGED_CLASS_H_
+#define OWN_PTR_TO_GC_MANAGED_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject;
+
+class PartObject {
+ DISALLOW_ALLOCATION();
+private:
+ OwnPtr<HeapObject> m_obj;
+};
+
+class HeapObject : public GarbageCollectedFinalized<HeapObject> {
+public:
+ void trace(Visitor*);
+private:
+ Vector<OwnPtr<HeapObject> > m_objs;
+ OwnPtr<HeapVector<Member<HeapObject> > > m_objs2;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.txt b/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.txt
new file mode 100644
index 0000000..4102e86
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/own_ptr_to_gc_managed_class.txt
@@ -0,0 +1,17 @@
+In file included from own_ptr_to_gc_managed_class.cpp:5:
+./own_ptr_to_gc_managed_class.h:14:1: warning: [blink-gc] Class 'PartObject' contains invalid fields.
+class PartObject {
+^
+./own_ptr_to_gc_managed_class.h:17:5: note: [blink-gc] OwnPtr field 'm_obj' to a GC managed class declared here:
+ OwnPtr<HeapObject> m_obj;
+ ^
+./own_ptr_to_gc_managed_class.h:20:1: warning: [blink-gc] Class 'HeapObject' contains invalid fields.
+class HeapObject : public GarbageCollectedFinalized<HeapObject> {
+^
+./own_ptr_to_gc_managed_class.h:24:5: note: [blink-gc] OwnPtr field 'm_objs' to a GC managed class declared here:
+ Vector<OwnPtr<HeapObject> > m_objs;
+ ^
+./own_ptr_to_gc_managed_class.h:25:5: note: [blink-gc] OwnPtr field 'm_objs2' to a GC managed class declared here:
+ OwnPtr<HeapVector<Member<HeapObject> > > m_objs2;
+ ^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.cpp b/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.cpp
new file mode 100644
index 0000000..2da8661
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.cpp
@@ -0,0 +1,14 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "part_object_to_gc_derived_class.h"
+
+namespace blink {
+
+void B::trace(Visitor* visitor)
+{
+ visitor->trace(m_a);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.h b/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.h
new file mode 100644
index 0000000..ef5a649
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.h
@@ -0,0 +1,23 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PART_OBJECT_TO_GC_DERIVED_CLASS_H_
+#define PART_OBJECT_TO_GC_DERIVED_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A : public GarbageCollected<A> { };
+
+class B : public GarbageCollected<B> {
+public:
+ void trace(Visitor*);
+private:
+ A m_a;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.txt b/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.txt
new file mode 100644
index 0000000..5970132
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/part_object_to_gc_derived_class.txt
@@ -0,0 +1,8 @@
+In file included from part_object_to_gc_derived_class.cpp:5:
+./part_object_to_gc_derived_class.h:14:1: warning: [blink-gc] Class 'B' contains invalid fields.
+class B : public GarbageCollected<B> {
+^
+./part_object_to_gc_derived_class.h:18:5: note: [blink-gc] Part-object field 'm_a' to a GC derived class declared here:
+ A m_a;
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.cpp b/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.cpp
new file mode 100644
index 0000000..7b3f286
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.cpp
@@ -0,0 +1,13 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "persistent_field_in_gc_managed_class.h"
+
+namespace blink {
+
+void HeapObject::trace(Visitor* visitor) {
+ visitor->trace(m_parts);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.h b/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.h
new file mode 100644
index 0000000..4cfcfce
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.h
@@ -0,0 +1,31 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PERSISTENT_FIELD_IN_GC_MANAGED_CLASS_H_
+#define PERSISTENT_FIELD_IN_GC_MANAGED_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject;
+
+class PartObject {
+ DISALLOW_ALLOCATION();
+private:
+ Persistent<HeapObject> m_obj;
+};
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ void trace(Visitor*);
+private:
+ PartObject m_part;
+ HeapVector<PartObject> m_parts;
+ PersistentHeapVector<Member<HeapObject> > m_objs;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.txt b/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.txt
new file mode 100644
index 0000000..d69c0c5
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/persistent_field_in_gc_managed_class.txt
@@ -0,0 +1,26 @@
+In file included from persistent_field_in_gc_managed_class.cpp:5:
+./persistent_field_in_gc_managed_class.h:20:1: warning: [blink-gc] Class 'HeapObject' contains GC root in field 'm_part'.
+class HeapObject : public GarbageCollected<HeapObject> {
+^
+./persistent_field_in_gc_managed_class.h:24:5: note: [blink-gc] Field 'm_part' with embedded GC root in 'HeapObject' declared here:
+ PartObject m_part;
+ ^
+./persistent_field_in_gc_managed_class.h:17:5: note: [blink-gc] Field 'm_obj' defining a GC root declared here:
+ Persistent<HeapObject> m_obj;
+ ^
+./persistent_field_in_gc_managed_class.h:20:1: warning: [blink-gc] Class 'HeapObject' contains GC root in field 'm_parts'.
+class HeapObject : public GarbageCollected<HeapObject> {
+^
+./persistent_field_in_gc_managed_class.h:25:5: note: [blink-gc] Field 'm_parts' with embedded GC root in 'HeapObject' declared here:
+ HeapVector<PartObject> m_parts;
+ ^
+./persistent_field_in_gc_managed_class.h:17:5: note: [blink-gc] Field 'm_obj' defining a GC root declared here:
+ Persistent<HeapObject> m_obj;
+ ^
+./persistent_field_in_gc_managed_class.h:20:1: warning: [blink-gc] Class 'HeapObject' contains GC root in field 'm_objs'.
+class HeapObject : public GarbageCollected<HeapObject> {
+^
+./persistent_field_in_gc_managed_class.h:26:5: note: [blink-gc] Field 'm_objs' defining a GC root declared here:
+ PersistentHeapVector<Member<HeapObject> > m_objs;
+ ^
+3 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.cpp b/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.cpp
new file mode 100644
index 0000000..dc7620a
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.cpp
@@ -0,0 +1,19 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "polymorphic_class_with_non_virtual_trace.h"
+
+namespace blink {
+
+void IsLeftMostPolymorphic::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+}
+
+void IsNotLeftMostPolymorphic::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.h b/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.h
new file mode 100644
index 0000000..f5d999e
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.h
@@ -0,0 +1,61 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef POLYMORPHIC_CLASS_WITH_NON_VIRTUAL_TRACE_H_
+#define POLYMORPHIC_CLASS_WITH_NON_VIRTUAL_TRACE_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ void trace(Visitor*) { }
+};
+
+class NonPolymorphicBase {
+};
+
+class PolymorphicBase {
+public:
+ virtual void foo();
+};
+
+class IsLeftMostPolymorphic
+ : public GarbageCollected<IsLeftMostPolymorphic>,
+ public PolymorphicBase {
+public:
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj;
+};
+
+class IsNotLeftMostPolymorphic
+ : public GarbageCollected<IsNotLeftMostPolymorphic>,
+ public NonPolymorphicBase,
+ public PolymorphicBase {
+public:
+ void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj;
+};
+
+template<typename T>
+class TemplatedNonPolymorphicBase
+ : public GarbageCollected<TemplatedNonPolymorphicBase<T> > {
+public:
+ void trace(Visitor* visitor) { visitor->trace(m_obj); }
+private:
+ Member<HeapObject> m_obj;
+};
+
+// Looks OK, but will result in an incorrect object pointer when marking.
+class TemplatedIsNotLeftMostPolymorphic
+ : public TemplatedNonPolymorphicBase<TemplatedIsNotLeftMostPolymorphic>,
+ public PolymorphicBase {
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.txt b/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.txt
new file mode 100644
index 0000000..38f2e77
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/polymorphic_class_with_non_virtual_trace.txt
@@ -0,0 +1,8 @@
+In file included from polymorphic_class_with_non_virtual_trace.cpp:5:
+./polymorphic_class_with_non_virtual_trace.h:17:1: warning: [blink-gc] Left-most base class 'NonPolymorphicBase' of derived class 'IsNotLeftMostPolymorphic' must be polymorphic.
+class NonPolymorphicBase {
+^
+./polymorphic_class_with_non_virtual_trace.h:45:1: warning: [blink-gc] Left-most base class 'TemplatedNonPolymorphicBase<blink::TemplatedIsNotLeftMostPolymorphic>' of derived class 'TemplatedIsNotLeftMostPolymorphic' must be polymorphic.
+class TemplatedNonPolymorphicBase
+^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.cpp b/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.cpp
new file mode 100644
index 0000000..d993a32
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.cpp
@@ -0,0 +1,7 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "pure_virtual_trace.h"
+
+// Nothing to define
diff --git a/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.h b/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.h
new file mode 100644
index 0000000..356a95e
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.h
@@ -0,0 +1,19 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PURE_VIRTUAL_TRACE_H_
+#define PURE_VIRTUAL_TRACE_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A : public GarbageCollected<A> {
+public:
+ virtual void trace(Visitor*) = 0;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.txt b/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.txt
new file mode 100644
index 0000000..175a28a
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/pure_virtual_trace.txt
@@ -0,0 +1,5 @@
+In file included from pure_virtual_trace.cpp:5:
+./pure_virtual_trace.h:14:5: warning: [blink-gc] Garbage collected class 'A' is not permitted to declare a pure-virtual trace method.
+ virtual void trace(Visitor*) = 0;
+ ^
+1 warning generated.
diff --git a/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.cpp b/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.cpp
new file mode 100644
index 0000000..4d6cc05
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.cpp
@@ -0,0 +1,13 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "raw_ptr_to_gc_managed_class.h"
+
+namespace blink {
+
+void HeapObject::trace(Visitor* visitor) {
+ visitor->trace(m_objs);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.h b/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.h
new file mode 100644
index 0000000..479f0e0
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.h
@@ -0,0 +1,30 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef RAW_PTR_TO_GC_MANAGED_CLASS_H_
+#define RAW_PTR_TO_GC_MANAGED_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject;
+
+class PartObject {
+ DISALLOW_ALLOCATION();
+private:
+ RawPtr<HeapObject> m_obj;
+};
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ void trace(Visitor*);
+private:
+ PartObject m_part;
+ HeapVector<HeapObject*> m_objs;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.txt b/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.txt
new file mode 100644
index 0000000..985ea27
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/raw_ptr_to_gc_managed_class.txt
@@ -0,0 +1,14 @@
+In file included from raw_ptr_to_gc_managed_class.cpp:5:
+./raw_ptr_to_gc_managed_class.h:14:1: warning: [blink-gc] Class 'PartObject' contains invalid fields.
+class PartObject {
+^
+./raw_ptr_to_gc_managed_class.h:17:5: note: [blink-gc] Raw pointer field 'm_obj' to a GC managed class declared here:
+ RawPtr<HeapObject> m_obj;
+ ^
+./raw_ptr_to_gc_managed_class.h:20:1: warning: [blink-gc] Class 'HeapObject' contains invalid fields.
+class HeapObject : public GarbageCollected<HeapObject> {
+^
+./raw_ptr_to_gc_managed_class.h:25:5: note: [blink-gc] Raw pointer field 'm_objs' to a GC managed class declared here:
+ HeapVector<HeapObject*> m_objs;
+ ^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.cpp b/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.cpp
new file mode 100644
index 0000000..e0a200f
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.cpp
@@ -0,0 +1,11 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "ref_ptr_to_gc_managed_class.h"
+
+namespace blink {
+
+void HeapObject::trace(Visitor*) { }
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.h b/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.h
new file mode 100644
index 0000000..0dba311
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.h
@@ -0,0 +1,30 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef REF_PTR_TO_GC_MANAGED_CLASS_H_
+#define REF_PTR_TO_GC_MANAGED_CLASS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject;
+
+class PartObject {
+ DISALLOW_ALLOCATION();
+private:
+ RefPtr<HeapObject> m_obj;
+};
+
+class HeapObject : public GarbageCollectedFinalized<HeapObject> {
+public:
+ void trace(Visitor*);
+private:
+ PartObject m_part;
+ Vector<RefPtr<HeapObject> > m_objs;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.txt b/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.txt
new file mode 100644
index 0000000..fd49785
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/ref_ptr_to_gc_managed_class.txt
@@ -0,0 +1,14 @@
+In file included from ref_ptr_to_gc_managed_class.cpp:5:
+./ref_ptr_to_gc_managed_class.h:14:1: warning: [blink-gc] Class 'PartObject' contains invalid fields.
+class PartObject {
+^
+./ref_ptr_to_gc_managed_class.h:17:5: note: [blink-gc] RefPtr field 'm_obj' to a GC managed class declared here:
+ RefPtr<HeapObject> m_obj;
+ ^
+./ref_ptr_to_gc_managed_class.h:20:1: warning: [blink-gc] Class 'HeapObject' contains invalid fields.
+class HeapObject : public GarbageCollectedFinalized<HeapObject> {
+^
+./ref_ptr_to_gc_managed_class.h:25:5: note: [blink-gc] RefPtr field 'm_objs' to a GC managed class declared here:
+ Vector<RefPtr<HeapObject> > m_objs;
+ ^
+2 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/stack_allocated.cpp b/tools/clang/blink_gc_plugin/tests/stack_allocated.cpp
new file mode 100644
index 0000000..450b3b5
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/stack_allocated.cpp
@@ -0,0 +1,13 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "stack_allocated.h"
+
+namespace blink {
+
+void HeapObject::trace(Visitor* visitor)
+{
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/stack_allocated.h b/tools/clang/blink_gc_plugin/tests/stack_allocated.h
new file mode 100644
index 0000000..ed77326
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/stack_allocated.h
@@ -0,0 +1,47 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef STACK_ALLOCATED_H_
+#define STACK_ALLOCATED_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject;
+
+class PartObject {
+ DISALLOW_ALLOCATION();
+private:
+ Member<HeapObject> m_obj; // Needs tracing.
+};
+
+class StackObject {
+ STACK_ALLOCATED();
+private:
+ Member<HeapObject> m_obj; // Does not need tracing.
+};
+
+class AnotherStackObject : public PartObject { // Invalid base.
+ STACK_ALLOCATED();
+private:
+ StackObject m_part; // Can embed a stack allocated object.
+};
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ void trace(Visitor*);
+private:
+ StackObject m_part; // Cannot embed a stack allocated object.
+};
+
+// STACK_ALLOCATED is inherited.
+class DerivedStackObject : public StackObject {
+private:
+ AnotherStackObject m_anotherPart; // Also fine.
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/stack_allocated.txt b/tools/clang/blink_gc_plugin/tests/stack_allocated.txt
new file mode 100644
index 0000000..2296d87
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/stack_allocated.txt
@@ -0,0 +1,17 @@
+In file included from stack_allocated.cpp:5:
+./stack_allocated.h:14:1: warning: [blink-gc] Class 'PartObject' requires a trace method.
+class PartObject {
+^
+./stack_allocated.h:17:5: note: [blink-gc] Untraced field 'm_obj' declared here:
+ Member<HeapObject> m_obj; // Needs tracing.
+ ^
+./stack_allocated.h:26:28: warning: [blink-gc] Stack-allocated class 'AnotherStackObject' derives class 'PartObject' which is not stack allocated.
+class AnotherStackObject : public PartObject { // Invalid base.
+ ^
+./stack_allocated.h:32:1: warning: [blink-gc] Class 'HeapObject' contains invalid fields.
+class HeapObject : public GarbageCollected<HeapObject> {
+^
+./stack_allocated.h:36:5: note: [blink-gc] Stack-allocated field 'm_part' declared here:
+ StackObject m_part; // Cannot embed a stack allocated object.
+ ^
+3 warnings generated.
diff --git a/tools/clang/blink_gc_plugin/tests/test.sh b/tools/clang/blink_gc_plugin/tests/test.sh
new file mode 100755
index 0000000..02c7477
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/test.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Hacky, primitive testing: This runs the Blink GC plugin for a set of input
+# files and compares the output with golden result files.
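+#
+# Each <name>.cpp test case is compiled with the plugin loaded, and the output
+# is diffed against the golden file <name>.txt; an optional <name>.flags file
+# supplies extra compiler flags. If the compile emits <name>.graph.json, that
+# graph is checked with ../process-graph.py and its output is diffed instead.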
+
+E_BADARGS=65
+E_FAILEDTEST=1
+
+failed_any_test=
+
+# Prints usage information.
+usage() {
+ echo "Usage: $(basename "${0}")" \
+ "<path to clang>" \
+ "<path to plugin>"
+ echo ""
+ echo " Runs all the libBlinkGCPlugin unit tests"
+ echo ""
+}
+
+# Runs a single test case.
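+# Arguments:
+#   $1: input file to compile
+#   $2: golden output file to diff against
+#   $3: optional file holding extra compiler flags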
+do_testcase() {
+ local flags=""
+ if [ -e "${3}" ]; then
+ flags="$(cat "${3}")"
+ fi
+ local output="$("${CLANG_PATH}" -c -Wno-c++11-extensions \
+ -Xclang -load -Xclang "${PLUGIN_PATH}" \
+ -Xclang -add-plugin -Xclang blink-gc-plugin ${flags} ${1} 2>&1)"
+ local json="${input%cpp}graph.json"
+ if [ -f "$json" ]; then
+ output="$(python ../process-graph.py -c ${json} 2>&1)"
+ fi
+ local diffout="$(echo "${output}" | diff - "${2}")"
+ if [ "${diffout}" = "" ]; then
+ echo "PASS: ${1}"
+ else
+ failed_any_test=yes
+ echo "FAIL: ${1}"
+ echo "Output of compiler:"
+ echo "${output}"
+ echo "Expected output:"
+ cat "${2}"
+ echo
+ fi
+}
+
+# Validate input to the script.
+if [[ -z "${1}" ]]; then
+ usage
+ exit ${E_BADARGS}
+elif [[ -z "${2}" ]]; then
+ usage
+ exit ${E_BADARGS}
+elif [[ ! -x "${1}" ]]; then
+ echo "${1} is not an executable"
+ usage
+ exit ${E_BADARGS}
+elif [[ ! -f "${2}" ]]; then
+ echo "${2} could not be found"
+ usage
+ exit ${E_BADARGS}
+else
+ export CLANG_PATH="${1}"
+ export PLUGIN_PATH="${2}"
+ echo "Using clang ${CLANG_PATH}..."
+ echo "Using plugin ${PLUGIN_PATH}..."
+
+ # The golden files assume that the cwd is this directory. To make the script
+ # work no matter what the cwd is, explicitly cd to there.
+ cd "$(dirname "${0}")"
+fi
+
+for input in *.cpp; do
+ do_testcase "${input}" "${input%cpp}txt" "${input%cpp}flags"
+done
+
+if [[ "${failed_any_test}" ]]; then
+ exit ${E_FAILEDTEST}
+fi
diff --git a/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.cpp b/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.cpp
new file mode 100644
index 0000000..c246aaa
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.cpp
@@ -0,0 +1,50 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "trace_after_dispatch.h"
+
+namespace blink {
+
+static B* toB(A* a) { return static_cast<B*>(a); }
+
+void A::trace(Visitor* visitor)
+{
+ switch (m_type) {
+ case TB:
+ toB(this)->traceAfterDispatch(visitor);
+ break;
+ case TC:
+ static_cast<C*>(this)->traceAfterDispatch(visitor);
+ break;
+ case TD:
+ // Missing static_cast<D*>(this)->traceAfterDispatch(visitor);
+ break;
+ }
+}
+
+void A::traceAfterDispatch(Visitor* visitor)
+{
+}
+
+void B::traceAfterDispatch(Visitor* visitor)
+{
+ visitor->trace(m_a);
+ // Missing A::traceAfterDispatch(visitor);
+ // Also check that calling trace does not count.
+ A::trace(visitor);
+}
+
+void C::traceAfterDispatch(Visitor* visitor)
+{
+ // Missing visitor->trace(m_a);
+ A::traceAfterDispatch(visitor);
+}
+
+void D::traceAfterDispatch(Visitor* visitor)
+{
+ visitor->trace(m_a);
+ Abstract::traceAfterDispatch(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.h b/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.h
new file mode 100644
index 0000000..a19a536
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.h
@@ -0,0 +1,55 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TRACE_AFTER_DISPATCH_H_
+#define TRACE_AFTER_DISPATCH_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A : public GarbageCollected<A> {
+public:
+ void trace(Visitor*);
+ void traceAfterDispatch(Visitor*);
+protected:
+ enum Type { TB, TC, TD };
+ A(Type type) : m_type(type) { }
+private:
+ Type m_type;
+};
+
+class B : public A {
+public:
+ B() : A(TB) { }
+ void traceAfterDispatch(Visitor*);
+private:
+ Member<A> m_a;
+};
+
+class C : public A {
+public:
+ C() : A(TC) { }
+ void traceAfterDispatch(Visitor*);
+private:
+ Member<A> m_a;
+};
+
+// This class is considered abstract and does not need to be dispatched to.
+class Abstract : public A {
+protected:
+ Abstract(Type type) : A(type) { }
+};
+
+class D : public Abstract {
+public:
+ D() : Abstract(TD) { }
+ void traceAfterDispatch(Visitor*);
+private:
+ Member<A> m_a;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.txt b/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.txt
new file mode 100644
index 0000000..877fbbe
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_after_dispatch.txt
@@ -0,0 +1,13 @@
+trace_after_dispatch.cpp:11:1: warning: [blink-gc] Missing dispatch to class 'D' in manual trace dispatch.
+void A::trace(Visitor* visitor)
+^
+trace_after_dispatch.cpp:30:1: warning: [blink-gc] Base class 'A' of derived class 'B' requires tracing.
+void B::traceAfterDispatch(Visitor* visitor)
+^
+trace_after_dispatch.cpp:38:1: warning: [blink-gc] Class 'C' has untraced fields that require tracing.
+void C::traceAfterDispatch(Visitor* visitor)
+^
+./trace_after_dispatch.h:36:5: note: [blink-gc] Untraced field 'm_a' declared here:
+ Member<A> m_a;
+ ^
+3 warnings generated.
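For reference, the three diagnostics above point at the gaps deliberately left in trace_after_dispatch.cpp. A minimal sketch of what the plugin would accept instead, reusing the classes and stub Visitor API from this test (illustration only, not part of the patch):

// Sketch: complete manual dispatch plus per-class traceAfterDispatch bodies.
void A::trace(Visitor* visitor)
{
    switch (m_type) {
    case TB:
        toB(this)->traceAfterDispatch(visitor);
        break;
    case TC:
        static_cast<C*>(this)->traceAfterDispatch(visitor);
        break;
    case TD:
        static_cast<D*>(this)->traceAfterDispatch(visitor); // Dispatch to D.
        break;
    }
}

void B::traceAfterDispatch(Visitor* visitor)
{
    visitor->trace(m_a);
    A::traceAfterDispatch(visitor); // Trace the base class, not A::trace().
}

void C::traceAfterDispatch(Visitor* visitor)
{
    visitor->trace(m_a); // Trace the previously untraced field.
    A::traceAfterDispatch(visitor);
}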
diff --git a/tools/clang/blink_gc_plugin/tests/trace_collections.cpp b/tools/clang/blink_gc_plugin/tests/trace_collections.cpp
new file mode 100644
index 0000000..9ba7c96
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_collections.cpp
@@ -0,0 +1,13 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "trace_collections.h"
+
+namespace blink {
+
+void HeapObject::trace(Visitor* visitor)
+{
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/trace_collections.h b/tools/clang/blink_gc_plugin/tests/trace_collections.h
new file mode 100644
index 0000000..219b056
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_collections.h
@@ -0,0 +1,44 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TRACE_COLLECTIONS_H_
+#define TRACE_COLLECTIONS_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ void trace(Visitor*);
+private:
+ HeapVector<Member<HeapObject> > m_heapVector;
+ Vector<Member<HeapObject>, 0, HeapAllocator> m_wtfVector;
+
+ HeapDeque<Member<HeapObject> > m_heapDeque;
+ Deque<Member<HeapObject>, 0, HeapAllocator> m_wtfDeque;
+
+ HeapHashSet<Member<HeapObject> > m_heapSet;
+ HashSet<Member<HeapObject>, void, HeapAllocator> m_wtfSet;
+
+ HeapListHashSet<Member<HeapObject> > m_heapListSet;
+ ListHashSet<Member<HeapObject>, void, HeapAllocator> m_wtfListSet;
+
+ HeapLinkedHashSet<Member<HeapObject> > m_heapLinkedSet;
+ LinkedHashSet<Member<HeapObject>, void, HeapAllocator> m_wtfLinkedSet;
+
+ HeapHashCountedSet<Member<HeapObject> > m_heapCountedSet;
+ HashCountedSet<Member<HeapObject>, void, HeapAllocator> m_wtfCountedSet;
+
+ HeapHashMap<int, Member<HeapObject> > m_heapMapKey;
+ HeapHashMap<Member<HeapObject>, int > m_heapMapVal;
+ HashMap<int, Member<HeapObject>, void, void, void, HeapAllocator>
+ m_wtfMapKey;
+ HashMap<Member<HeapObject>, int, void, void, void, HeapAllocator>
+ m_wtfMapVal;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/trace_collections.txt b/tools/clang/blink_gc_plugin/tests/trace_collections.txt
new file mode 100644
index 0000000..7c20ad4
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_collections.txt
@@ -0,0 +1,52 @@
+trace_collections.cpp:9:1: warning: [blink-gc] Class 'HeapObject' has untraced fields that require tracing.
+void HeapObject::trace(Visitor* visitor)
+^
+./trace_collections.h:16:5: note: [blink-gc] Untraced field 'm_heapVector' declared here:
+ HeapVector<Member<HeapObject> > m_heapVector;
+ ^
+./trace_collections.h:17:5: note: [blink-gc] Untraced field 'm_wtfVector' declared here:
+ Vector<Member<HeapObject>, 0, HeapAllocator> m_wtfVector;
+ ^
+./trace_collections.h:19:5: note: [blink-gc] Untraced field 'm_heapDeque' declared here:
+ HeapDeque<Member<HeapObject> > m_heapDeque;
+ ^
+./trace_collections.h:20:5: note: [blink-gc] Untraced field 'm_wtfDeque' declared here:
+ Deque<Member<HeapObject>, 0, HeapAllocator> m_wtfDeque;
+ ^
+./trace_collections.h:22:5: note: [blink-gc] Untraced field 'm_heapSet' declared here:
+ HeapHashSet<Member<HeapObject> > m_heapSet;
+ ^
+./trace_collections.h:23:5: note: [blink-gc] Untraced field 'm_wtfSet' declared here:
+ HashSet<Member<HeapObject>, void, HeapAllocator> m_wtfSet;
+ ^
+./trace_collections.h:25:5: note: [blink-gc] Untraced field 'm_heapListSet' declared here:
+ HeapListHashSet<Member<HeapObject> > m_heapListSet;
+ ^
+./trace_collections.h:26:5: note: [blink-gc] Untraced field 'm_wtfListSet' declared here:
+ ListHashSet<Member<HeapObject>, void, HeapAllocator> m_wtfListSet;
+ ^
+./trace_collections.h:28:5: note: [blink-gc] Untraced field 'm_heapLinkedSet' declared here:
+ HeapLinkedHashSet<Member<HeapObject> > m_heapLinkedSet;
+ ^
+./trace_collections.h:29:5: note: [blink-gc] Untraced field 'm_wtfLinkedSet' declared here:
+ LinkedHashSet<Member<HeapObject>, void, HeapAllocator> m_wtfLinkedSet;
+ ^
+./trace_collections.h:31:5: note: [blink-gc] Untraced field 'm_heapCountedSet' declared here:
+ HeapHashCountedSet<Member<HeapObject> > m_heapCountedSet;
+ ^
+./trace_collections.h:32:5: note: [blink-gc] Untraced field 'm_wtfCountedSet' declared here:
+ HashCountedSet<Member<HeapObject>, void, HeapAllocator> m_wtfCountedSet;
+ ^
+./trace_collections.h:34:5: note: [blink-gc] Untraced field 'm_heapMapKey' declared here:
+ HeapHashMap<int, Member<HeapObject> > m_heapMapKey;
+ ^
+./trace_collections.h:35:5: note: [blink-gc] Untraced field 'm_heapMapVal' declared here:
+ HeapHashMap<Member<HeapObject>, int > m_heapMapVal;
+ ^
+./trace_collections.h:36:5: note: [blink-gc] Untraced field 'm_wtfMapKey' declared here:
+ HashMap<int, Member<HeapObject>, void, void, void, HeapAllocator>
+ ^
+./trace_collections.h:38:5: note: [blink-gc] Untraced field 'm_wtfMapVal' declared here:
+ HashMap<Member<HeapObject>, int, void, void, void, HeapAllocator>
+ ^
+1 warning generated.
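The single warning above fans out into one note per collection field. A sketch of a trace() body that would satisfy the plugin, assuming the stub Visitor::trace overloads accept these heap collection types (illustration only, not part of the patch):

void HeapObject::trace(Visitor* visitor)
{
    // Every on-heap collection, whether spelled as a Heap* alias or as a WTF
    // container instantiated with HeapAllocator, must be traced.
    visitor->trace(m_heapVector);
    visitor->trace(m_wtfVector);
    visitor->trace(m_heapDeque);
    visitor->trace(m_wtfDeque);
    visitor->trace(m_heapSet);
    visitor->trace(m_wtfSet);
    visitor->trace(m_heapListSet);
    visitor->trace(m_wtfListSet);
    visitor->trace(m_heapLinkedSet);
    visitor->trace(m_wtfLinkedSet);
    visitor->trace(m_heapCountedSet);
    visitor->trace(m_wtfCountedSet);
    visitor->trace(m_heapMapKey);
    visitor->trace(m_heapMapVal);
    visitor->trace(m_wtfMapKey);
    visitor->trace(m_wtfMapVal);
}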
diff --git a/tools/clang/blink_gc_plugin/tests/trace_if_needed.cpp b/tools/clang/blink_gc_plugin/tests/trace_if_needed.cpp
new file mode 100644
index 0000000..563c6cc
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_if_needed.cpp
@@ -0,0 +1,16 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "trace_if_needed.h"
+
+namespace blink {
+
+template<typename T>
+void TemplatedObject<T>::trace(Visitor* visitor)
+{
+ TraceIfNeeded<T>::trace(visitor, &m_one);
+ // Missing trace of m_two
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/trace_if_needed.h b/tools/clang/blink_gc_plugin/tests/trace_if_needed.h
new file mode 100644
index 0000000..00b8f22
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_if_needed.h
@@ -0,0 +1,27 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TRACE_IF_NEEDED_H_
+#define TRACE_IF_NEEDED_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> { };
+
+template<typename T>
+class TemplatedObject : public GarbageCollected<TemplatedObject<T> > {
+public:
+ virtual void trace(Visitor*);
+private:
+ T m_one;
+ T m_two;
+};
+
+class InstantiatedObject : public TemplatedObject<Member<HeapObject> > { };
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/trace_if_needed.txt b/tools/clang/blink_gc_plugin/tests/trace_if_needed.txt
new file mode 100644
index 0000000..79a24e8
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_if_needed.txt
@@ -0,0 +1,7 @@
+trace_if_needed.cpp:9:1: warning: [blink-gc] Class 'TemplatedObject<blink::Member<blink::HeapObject> >' has untraced fields that require tracing.
+template<typename T>
+^
+./trace_if_needed.h:20:5: note: [blink-gc] Untraced field 'm_two' declared here:
+ T m_two;
+ ^
+1 warning generated.
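As the note above shows, only m_two is missing; m_one is already handled through TraceIfNeeded. A sketch of the compliant template trace (illustration only, not part of the patch):

template<typename T>
void TemplatedObject<T>::trace(Visitor* visitor)
{
    TraceIfNeeded<T>::trace(visitor, &m_one);
    TraceIfNeeded<T>::trace(visitor, &m_two); // The field the test leaves untraced.
}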
diff --git a/tools/clang/blink_gc_plugin/tests/trace_templated_super.cpp b/tools/clang/blink_gc_plugin/tests/trace_templated_super.cpp
new file mode 100644
index 0000000..2b59034
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_templated_super.cpp
@@ -0,0 +1,36 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "trace_templated_super.h"
+
+namespace blink {
+
+template<typename T>
+void Super<T>::clearWeakMembers(Visitor* visitor)
+{
+ (void)m_weak;
+}
+
+template<typename T>
+void Super<T>::trace(Visitor* visitor)
+{
+ visitor->registerWeakMembers<Super<T>, &Super<T>::clearWeakMembers>(this);
+ visitor->trace(m_obj);
+ Mixin::trace(visitor);
+}
+
+template<typename T>
+void Sub<T>::trace(Visitor* visitor)
+{
+ // Missing trace of m_obj.
+ Super<T>::trace(visitor);
+}
+
+void HeapObject::trace(Visitor* visitor)
+{
+ visitor->trace(m_obj);
+ Sub<HeapObject>::trace(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/trace_templated_super.h b/tools/clang/blink_gc_plugin/tests/trace_templated_super.h
new file mode 100644
index 0000000..09e5257
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_templated_super.h
@@ -0,0 +1,47 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TRACE_TEMPLATED_SUPER_H_
+#define TRACE_TEMPLATED_SUPER_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject;
+
+class Mixin : public GarbageCollectedMixin {
+public:
+ virtual void trace(Visitor*) { }
+};
+
+template<typename T>
+class Super : public GarbageCollected<Super<T> >, public Mixin {
+ USING_GARBAGE_COLLECTED_MIXIN(Super);
+public:
+ virtual void trace(Visitor*);
+ void clearWeakMembers(Visitor*);
+private:
+ Member<HeapObject> m_obj;
+ WeakMember<HeapObject> m_weak;
+};
+
+template<typename T>
+class Sub : public Super<T> {
+public:
+ virtual void trace(Visitor* visitor);
+private:
+ Member<HeapObject> m_obj;
+};
+
+class HeapObject : public Sub<HeapObject> {
+public:
+ virtual void trace(Visitor*);
+private:
+ Member<HeapObject> m_obj;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/trace_templated_super.txt b/tools/clang/blink_gc_plugin/tests/trace_templated_super.txt
new file mode 100644
index 0000000..291b018
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/trace_templated_super.txt
@@ -0,0 +1,7 @@
+trace_templated_super.cpp:23:1: warning: [blink-gc] Class 'Sub<blink::HeapObject>' has untraced fields that require tracing.
+template<typename T>
+^
+./trace_templated_super.h:35:5: note: [blink-gc] Untraced field 'm_obj' declared here:
+ Member<HeapObject> m_obj;
+ ^
+1 warning generated.
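Super<T>::trace already traces its own field and its Mixin base; the warning targets Sub<T>, which declares an m_obj field of its own. A sketch of the compliant Sub<T>::trace (illustration only, not part of the patch):

template<typename T>
void Sub<T>::trace(Visitor* visitor)
{
    visitor->trace(m_obj);    // Sub's own field (it hides Super<T>::m_obj).
    Super<T>::trace(visitor); // Then delegate to the superclass.
}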
diff --git a/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.cpp b/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.cpp
new file mode 100644
index 0000000..2ba6f1e
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.cpp
@@ -0,0 +1,30 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "virtual_and_trace_after_dispatch.h"
+
+namespace blink {
+
+static B* toB(A* a) { return static_cast<B*>(a); }
+
+void A::trace(Visitor* visitor)
+{
+ switch (m_type) {
+ case TB:
+ toB(this)->traceAfterDispatch(visitor);
+ break;
+ }
+}
+
+void A::traceAfterDispatch(Visitor* visitor)
+{
+}
+
+void B::traceAfterDispatch(Visitor* visitor)
+{
+ visitor->trace(m_a);
+ A::traceAfterDispatch(visitor);
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.h b/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.h
new file mode 100644
index 0000000..5048349
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.h
@@ -0,0 +1,34 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef VIRTUAL_AND_TRACE_AFTER_DISPATCH_H_
+#define VIRTUAL_AND_TRACE_AFTER_DISPATCH_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class A : public GarbageCollected<A> {
+public:
+ void trace(Visitor*);
+ void traceAfterDispatch(Visitor*);
+protected:
+ enum Type { TB };
+ A(Type type) : m_type(type) { }
+private:
+ Type m_type;
+};
+
+class B : public A {
+public:
+ B() : A(TB) { }
+ void traceAfterDispatch(Visitor*);
+ virtual void foo() { }
+private:
+ Member<A> m_a;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.txt b/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.txt
new file mode 100644
index 0000000..fb46696
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/virtual_and_trace_after_dispatch.txt
@@ -0,0 +1,11 @@
+In file included from virtual_and_trace_after_dispatch.cpp:5:
+./virtual_and_trace_after_dispatch.h:12:1: warning: [blink-gc] Left-most base class 'A' of derived class 'B' must be polymorphic.
+class A : public GarbageCollected<A> {
+^
+./virtual_and_trace_after_dispatch.h:23:1: warning: [blink-gc] Class 'B' contains or inherits virtual methods but implements manual dispatching.
+class B : public A {
+^
+./virtual_and_trace_after_dispatch.h:14:5: note: [blink-gc] Manual dispatch 'trace' declared here:
+ void trace(Visitor*);
+ ^
+2 warnings generated.
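Both warnings above flag the same mismatch: B introduces a virtual method, so the hierarchy is polymorphic, yet A still implements manual trace dispatch over a non-virtual trace(). One way to satisfy the plugin, sketched against the same stubs (illustration only, not part of the patch), is to drop the manual dispatch and use ordinary virtual tracing:

class A : public GarbageCollected<A> {
public:
    virtual void trace(Visitor*) { }
};

class B : public A {
public:
    virtual void trace(Visitor* visitor)
    {
        visitor->trace(m_a);
        A::trace(visitor); // Trace the base class.
    }
    virtual void foo() { }
private:
    Member<A> m_a;
};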
diff --git a/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.cpp b/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.cpp
new file mode 100644
index 0000000..382e9f9
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.cpp
@@ -0,0 +1,28 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "weak_fields_require_tracing.h"
+
+namespace blink {
+
+void HeapObject::trace(Visitor* visitor)
+{
+ // Missing visitor->trace(m_obj1);
+ // Missing visitor->trace(m_obj2);
+ // visitor->trace(m_obj3) in callback.
+ // Missing visitor->trace(m_set1);
+ visitor->trace(m_set2);
+ visitor->registerWeakMembers<HeapObject,
+ &HeapObject::clearWeakMembers>(this);
+}
+
+void HeapObject::clearWeakMembers(Visitor* visitor)
+{
+ visitor->trace(m_obj1); // Does not count.
+ // Missing visitor->trace(m_obj2);
+ visitor->trace(m_obj3); // OK.
+ visitor->trace(m_set1); // Does not count.
+}
+
+}
diff --git a/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.h b/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.h
new file mode 100644
index 0000000..c6850e6
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.h
@@ -0,0 +1,26 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef WEAK_FIELDS_REQUIRE_TRACING_H_
+#define WEAK_FIELDS_REQUIRE_TRACING_H_
+
+#include "heap/stubs.h"
+
+namespace blink {
+
+class HeapObject : public GarbageCollected<HeapObject> {
+public:
+ void trace(Visitor*);
+ void clearWeakMembers(Visitor*);
+private:
+ Member<HeapObject> m_obj1;
+ WeakMember<HeapObject> m_obj2;
+ WeakMember<HeapObject> m_obj3;
+ HeapHashSet<WeakMember<HeapObject> > m_set1;
+ HeapHashSet<WeakMember<HeapObject> > m_set2;
+};
+
+}
+
+#endif
diff --git a/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.txt b/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.txt
new file mode 100644
index 0000000..02f56a3
--- /dev/null
+++ b/tools/clang/blink_gc_plugin/tests/weak_fields_require_tracing.txt
@@ -0,0 +1,13 @@
+weak_fields_require_tracing.cpp:9:1: warning: [blink-gc] Class 'HeapObject' has untraced fields that require tracing.
+void HeapObject::trace(Visitor* visitor)
+^
+./weak_fields_require_tracing.h:17:5: note: [blink-gc] Untraced field 'm_obj1' declared here:
+ Member<HeapObject> m_obj1;
+ ^
+./weak_fields_require_tracing.h:18:5: note: [blink-gc] Untraced field 'm_obj2' declared here:
+ WeakMember<HeapObject> m_obj2;
+ ^
+./weak_fields_require_tracing.h:20:5: note: [blink-gc] Untraced field 'm_set1' declared here:
+ HeapHashSet<WeakMember<HeapObject> > m_set1;
+ ^
+1 warning generated.
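Per the notes above, m_obj3 and m_set2 are already considered traced (the former via the registered weak-member callback, the latter directly), while m_obj1, m_obj2, and m_set1 are not. One way to silence the warning, sketched with the same stub API (illustration only, not part of the patch):

void HeapObject::trace(Visitor* visitor)
{
    visitor->trace(m_obj1);
    visitor->trace(m_obj2);
    visitor->trace(m_set1);
    visitor->trace(m_set2);
    // m_obj3 stays traced inside clearWeakMembers(), which the plugin accepts
    // because the callback is registered here.
    visitor->registerWeakMembers<HeapObject,
                                 &HeapObject::clearWeakMembers>(this);
}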
diff --git a/tools/clang/empty_string/CMakeLists.txt b/tools/clang/empty_string/CMakeLists.txt
new file mode 100644
index 0000000..49b0234
--- /dev/null
+++ b/tools/clang/empty_string/CMakeLists.txt
@@ -0,0 +1,26 @@
+set(LLVM_LINK_COMPONENTS
+ BitReader
+ MCParser
+ Option
+ )
+
+add_llvm_executable(empty_string
+ EmptyStringConverter.cpp
+ )
+
+target_link_libraries(empty_string
+ clangAST
+ clangASTMatchers
+ clangAnalysis
+ clangBasic
+ clangDriver
+ clangEdit
+ clangFrontend
+ clangLex
+ clangParse
+ clangSema
+ clangSerialization
+ clangTooling
+ )
+
+install(TARGETS empty_string RUNTIME DESTINATION bin)
diff --git a/tools/clang/empty_string/EmptyStringConverter.cpp b/tools/clang/empty_string/EmptyStringConverter.cpp
new file mode 100644
index 0000000..28cc602
--- /dev/null
+++ b/tools/clang/empty_string/EmptyStringConverter.cpp
@@ -0,0 +1,205 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This implements a Clang tool to convert all instances of std::string("") to
+// std::string(). The latter is more efficient (as std::string doesn't have to
+// take a copy of an empty string) and generates fewer instructions as well. It
+// should be run using the tools/clang/scripts/run_tool.py helper.
+
+#include <memory>
+#include "clang/ASTMatchers/ASTMatchers.h"
+#include "clang/ASTMatchers/ASTMatchFinder.h"
+#include "clang/Basic/SourceManager.h"
+#include "clang/Frontend/FrontendActions.h"
+#include "clang/Tooling/CommonOptionsParser.h"
+#include "clang/Tooling/Refactoring.h"
+#include "clang/Tooling/Tooling.h"
+#include "llvm/Support/CommandLine.h"
+
+using clang::ast_matchers::MatchFinder;
+using clang::ast_matchers::argumentCountIs;
+using clang::ast_matchers::bindTemporaryExpr;
+using clang::ast_matchers::constructorDecl;
+using clang::ast_matchers::constructExpr;
+using clang::ast_matchers::defaultArgExpr;
+using clang::ast_matchers::expr;
+using clang::ast_matchers::forEach;
+using clang::ast_matchers::has;
+using clang::ast_matchers::hasArgument;
+using clang::ast_matchers::hasDeclaration;
+using clang::ast_matchers::hasName;
+using clang::ast_matchers::id;
+using clang::ast_matchers::methodDecl;
+using clang::ast_matchers::newExpr;
+using clang::ast_matchers::ofClass;
+using clang::ast_matchers::stringLiteral;
+using clang::ast_matchers::varDecl;
+using clang::tooling::CommonOptionsParser;
+using clang::tooling::Replacement;
+using clang::tooling::Replacements;
+
+namespace {
+
+// Handles replacements for stack and heap-allocated instances, e.g.:
+// std::string a("");
+// std::string* b = new std::string("");
+class ConstructorCallback : public MatchFinder::MatchCallback {
+ public:
+ ConstructorCallback(Replacements* replacements)
+ : replacements_(replacements) {}
+
+ virtual void run(const MatchFinder::MatchResult& result) override;
+
+ private:
+ Replacements* const replacements_;
+};
+
+// Handles replacements for invocations of std::string("") in an initializer
+// list.
+class InitializerCallback : public MatchFinder::MatchCallback {
+ public:
+ InitializerCallback(Replacements* replacements)
+ : replacements_(replacements) {}
+
+ virtual void run(const MatchFinder::MatchResult& result) override;
+
+ private:
+ Replacements* const replacements_;
+};
+
+// Handles replacements for invocations of std::string("") in a temporary
+// context, e.g. FunctionThatTakesString(std::string("")). Note that this
+// handles implicit construction of std::string as well.
+class TemporaryCallback : public MatchFinder::MatchCallback {
+ public:
+ TemporaryCallback(Replacements* replacements) : replacements_(replacements) {}
+
+ virtual void run(const MatchFinder::MatchResult& result) override;
+
+ private:
+ Replacements* const replacements_;
+};
+
+class EmptyStringConverter {
+ public:
+ explicit EmptyStringConverter(Replacements* replacements)
+ : constructor_callback_(replacements),
+ initializer_callback_(replacements),
+ temporary_callback_(replacements) {}
+
+ void SetupMatchers(MatchFinder* match_finder);
+
+ private:
+ ConstructorCallback constructor_callback_;
+ InitializerCallback initializer_callback_;
+ TemporaryCallback temporary_callback_;
+};
+
+void EmptyStringConverter::SetupMatchers(MatchFinder* match_finder) {
+ const clang::ast_matchers::StatementMatcher& constructor_call =
+ id("call",
+ constructExpr(
+ hasDeclaration(methodDecl(ofClass(hasName("std::basic_string")))),
+ argumentCountIs(2),
+ hasArgument(0, id("literal", stringLiteral())),
+ hasArgument(1, defaultArgExpr())));
+
+ // Note that expr(has()) in the matcher is significant; the Clang AST wraps
+ // calls to the std::string constructor with exprWithCleanups nodes. Without
+ // the expr(has()) matcher, the first and last rules would not match anything!
+ match_finder->addMatcher(varDecl(forEach(expr(has(constructor_call)))),
+ &constructor_callback_);
+ match_finder->addMatcher(newExpr(has(constructor_call)),
+ &constructor_callback_);
+ match_finder->addMatcher(bindTemporaryExpr(has(constructor_call)),
+ &temporary_callback_);
+ match_finder->addMatcher(
+ constructorDecl(forEach(expr(has(constructor_call)))),
+ &initializer_callback_);
+}
+
+void ConstructorCallback::run(const MatchFinder::MatchResult& result) {
+ const clang::StringLiteral* literal =
+ result.Nodes.getNodeAs<clang::StringLiteral>("literal");
+ if (literal->getLength() > 0)
+ return;
+
+ const clang::CXXConstructExpr* call =
+ result.Nodes.getNodeAs<clang::CXXConstructExpr>("call");
+ clang::CharSourceRange range =
+ clang::CharSourceRange::getTokenRange(call->getParenOrBraceRange());
+ replacements_->insert(Replacement(*result.SourceManager, range, ""));
+}
+
+void InitializerCallback::run(const MatchFinder::MatchResult& result) {
+ const clang::StringLiteral* literal =
+ result.Nodes.getNodeAs<clang::StringLiteral>("literal");
+ if (literal->getLength() > 0)
+ return;
+
+ const clang::CXXConstructExpr* call =
+ result.Nodes.getNodeAs<clang::CXXConstructExpr>("call");
+ replacements_->insert(Replacement(*result.SourceManager, call, ""));
+}
+
+void TemporaryCallback::run(const MatchFinder::MatchResult& result) {
+ const clang::StringLiteral* literal =
+ result.Nodes.getNodeAs<clang::StringLiteral>("literal");
+ if (literal->getLength() > 0)
+ return;
+
+ const clang::CXXConstructExpr* call =
+ result.Nodes.getNodeAs<clang::CXXConstructExpr>("call");
+ // Differentiate between explicit and implicit calls to std::string's
+ // constructor. An implicitly generated constructor won't have a valid
+ // source range for the parenthesis. We do this because the matched expression
+ // for |call| in the explicit case doesn't include the closing parenthesis.
+ clang::SourceRange range = call->getParenOrBraceRange();
+ if (range.isValid()) {
+ replacements_->insert(Replacement(*result.SourceManager, literal, ""));
+ } else {
+ replacements_->insert(
+ Replacement(*result.SourceManager,
+ call,
+ literal->isWide() ? "std::wstring()" : "std::string()"));
+ }
+}
+
+} // namespace
+
+static llvm::cl::extrahelp common_help(CommonOptionsParser::HelpMessage);
+
+int main(int argc, const char* argv[]) {
+ llvm::cl::OptionCategory category("EmptyString Tool");
+ CommonOptionsParser options(argc, argv, category);
+ clang::tooling::ClangTool tool(options.getCompilations(),
+ options.getSourcePathList());
+
+ Replacements replacements;
+ EmptyStringConverter converter(&replacements);
+ MatchFinder match_finder;
+ converter.SetupMatchers(&match_finder);
+
+ std::unique_ptr<clang::tooling::FrontendActionFactory> frontend_factory =
+ clang::tooling::newFrontendActionFactory(&match_finder);
+ int result = tool.run(frontend_factory.get());
+ if (result != 0)
+ return result;
+
+ // Each replacement line should have the following format:
+ // r:<file path>:<offset>:<length>:<replacement text>
+ // Only the <replacement text> field can contain embedded ":" characters.
+ // TODO(dcheng): Use a more clever serialization. Ideally we'd use the YAML
+ // serialization and then use clang-apply-replacements, but that would require
+ // copying and pasting a larger amount of boilerplate for all Chrome clang
+ // tools.
+ llvm::outs() << "==== BEGIN EDITS ====\n";
+ for (const auto& r : replacements) {
+ llvm::outs() << "r:" << r.getFilePath() << ":" << r.getOffset() << ":"
+ << r.getLength() << ":" << r.getReplacementText() << "\n";
+ }
+ llvm::outs() << "==== END EDITS ====\n";
+
+ return 0;
+}
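A small illustration of the two temporary forms that TemporaryCallback::run distinguishes above; the callee name here is hypothetical and the snippet is not part of the patch:

#include <string>

void TakesString(const std::string& s); // Hypothetical callee.

void Examples() {
    // Explicit construction: the CXXConstructExpr has a valid paren range, so
    // only the "" literal is erased, leaving std::string().
    TakesString(std::string(""));

    // Implicit conversion from a literal: there is no paren range, so the
    // whole call is rewritten to std::string() (std::wstring() for L"").
    TakesString("");
}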
diff --git a/tools/clang/empty_string/tests/test-expected.cc b/tools/clang/empty_string/tests/test-expected.cc
new file mode 100644
index 0000000..6762e37
--- /dev/null
+++ b/tools/clang/empty_string/tests/test-expected.cc
@@ -0,0 +1,46 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Test file for the empty string clang tool.
+
+#include <string>
+
+// Tests for std::string declarations.
+void TestDeclarations() {
+ std::string a, b("abc"), c;
+}
+
+// Tests for std::string allocated with new.
+void TestNew() {
+ std::string* a = new std::string, *b = new std::string("abc"),
+ *c = new std::string, *d = new std::string();
+}
+
+// Tests for std::string construction in initializer lists.
+class TestInitializers {
+ public:
+ TestInitializers() {}
+ TestInitializers(bool) {}
+ TestInitializers(double) : b("cat"), c() {}
+
+ private:
+ std::string a;
+ std::string b;
+ std::string c;
+};
+
+// Tests for temporary std::strings.
+void TestTemporaries(const std::string& reference_argument,
+ const std::string value_argument) {
+ TestTemporaries(std::string(), std::string());
+ TestTemporaries(std::string(), std::string());
+}
+
+// Tests for temporary std::wstrings.
+void TestWideTemporaries(const std::wstring& reference_argument,
+ const std::wstring value_argument) {
+ TestWideTemporaries(std::wstring(), std::wstring());
+ TestWideTemporaries(std::wstring(), std::wstring());
+}
+
diff --git a/tools/clang/empty_string/tests/test-original.cc b/tools/clang/empty_string/tests/test-original.cc
new file mode 100644
index 0000000..2edb896
--- /dev/null
+++ b/tools/clang/empty_string/tests/test-original.cc
@@ -0,0 +1,46 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Test file for the empty string clang tool.
+
+#include <string>
+
+// Tests for std::string declarations.
+void TestDeclarations() { std::string a(""), b("abc"), c(""); }
+
+// Tests for std::string allocated with new.
+void TestNew() {
+ std::string* a = new std::string(""),
+ *b = new std::string("abc"),
+ *c = new std::string(""),
+ *d = new std::string();
+}
+
+// Tests for std::string construction in initializer lists.
+class TestInitializers {
+ public:
+ TestInitializers() : a("") {}
+ TestInitializers(bool) : a(""), b("") {}
+ TestInitializers(double) : a(""), b("cat"), c() {}
+
+ private:
+ std::string a;
+ std::string b;
+ std::string c;
+};
+
+// Tests for temporary std::strings.
+void TestTemporaries(const std::string& reference_argument,
+ const std::string value_argument) {
+ TestTemporaries("", "");
+ TestTemporaries(std::string(""), std::string(""));
+}
+
+// Tests for temporary std::wstrings.
+void TestWideTemporaries(const std::wstring& reference_argument,
+ const std::wstring value_argument) {
+ TestWideTemporaries(L"", L"");
+ TestWideTemporaries(std::wstring(L""), std::wstring(L""));
+}
+
diff --git a/tools/clang/plugins/CMakeLists.txt b/tools/clang/plugins/CMakeLists.txt
new file mode 100644
index 0000000..df30cdd
--- /dev/null
+++ b/tools/clang/plugins/CMakeLists.txt
@@ -0,0 +1,13 @@
+add_llvm_loadable_module(libFindBadConstructs
+ ChromeClassTester.cpp
+ FindBadConstructsAction.cpp
+ FindBadConstructsConsumer.cpp
+ )
+
+install(TARGETS libFindBadConstructs LIBRARY DESTINATION lib)
+
+cr_add_test(plugins_test
+ ${CMAKE_CURRENT_SOURCE_DIR}/tests/test.sh
+ ${LLVM_BUILD_DIR}/bin/clang
+ $<TARGET_FILE:libFindBadConstructs>
+ )
diff --git a/tools/clang/plugins/ChromeClassTester.cpp b/tools/clang/plugins/ChromeClassTester.cpp
new file mode 100644
index 0000000..c8bc543
--- /dev/null
+++ b/tools/clang/plugins/ChromeClassTester.cpp
@@ -0,0 +1,311 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// A general interface for filtering and only acting on classes in Chromium C++
+// code.
+
+#include "ChromeClassTester.h"
+
+#include <sys/param.h>
+
+#include "clang/AST/AST.h"
+#include "clang/Basic/FileManager.h"
+#include "clang/Basic/SourceManager.h"
+
+using namespace clang;
+
+namespace {
+
+bool starts_with(const std::string& one, const std::string& two) {
+ return one.compare(0, two.size(), two) == 0;
+}
+
+std::string lstrip(const std::string& one, const std::string& two) {
+ if (starts_with(one, two))
+ return one.substr(two.size());
+ return one;
+}
+
+bool ends_with(const std::string& one, const std::string& two) {
+ if (two.size() > one.size())
+ return false;
+
+ return one.compare(one.size() - two.size(), two.size(), two) == 0;
+}
+
+} // namespace
+
+ChromeClassTester::ChromeClassTester(CompilerInstance& instance)
+ : instance_(instance),
+ diagnostic_(instance.getDiagnostics()) {
+ BuildBannedLists();
+}
+
+ChromeClassTester::~ChromeClassTester() {}
+
+void ChromeClassTester::HandleTagDeclDefinition(TagDecl* tag) {
+ pending_class_decls_.push_back(tag);
+}
+
+bool ChromeClassTester::HandleTopLevelDecl(DeclGroupRef group_ref) {
+ for (size_t i = 0; i < pending_class_decls_.size(); ++i)
+ CheckTag(pending_class_decls_[i]);
+ pending_class_decls_.clear();
+
+ return true; // true means continue parsing.
+}
+
+void ChromeClassTester::CheckTag(TagDecl* tag) {
+ // We handle class types here where we have semantic information. We can only
+ // check structs/classes/enums here, but we get a bunch of nice semantic
+ // information instead of just parsing information.
+
+ if (CXXRecordDecl* record = dyn_cast<CXXRecordDecl>(tag)) {
+ // If this is a POD or a class template or a type dependent on a
+ // templated class, assume there's no ctor/dtor/virtual method
+ // optimization that we can do.
+ if (record->isPOD() ||
+ record->getDescribedClassTemplate() ||
+ record->getTemplateSpecializationKind() ||
+ record->isDependentType())
+ return;
+
+ if (InBannedNamespace(record))
+ return;
+
+ SourceLocation record_location = record->getInnerLocStart();
+ if (InBannedDirectory(record_location))
+ return;
+
+ // We sadly need to maintain a blacklist of types that violate these
+ // rules, but do so for good reason or due to limitations of this
+ // checker (i.e., we don't handle extern templates very well).
+ std::string base_name = record->getNameAsString();
+ if (IsIgnoredType(base_name))
+ return;
+
+ // We ignore all classes that end with "Matcher" because they're probably
+ // GMock artifacts.
+ if (ends_with(base_name, "Matcher"))
+ return;
+
+ CheckChromeClass(record_location, record);
+ } else if (EnumDecl* enum_decl = dyn_cast<EnumDecl>(tag)) {
+ SourceLocation enum_location = enum_decl->getInnerLocStart();
+ if (InBannedDirectory(enum_location))
+ return;
+
+ std::string base_name = enum_decl->getNameAsString();
+ if (IsIgnoredType(base_name))
+ return;
+
+ CheckChromeEnum(enum_location, enum_decl);
+ }
+}
+
+void ChromeClassTester::emitWarning(SourceLocation loc,
+ const char* raw_error) {
+ FullSourceLoc full(loc, instance().getSourceManager());
+ std::string err;
+ err = "[chromium-style] ";
+ err += raw_error;
+ DiagnosticIDs::Level level =
+ diagnostic().getWarningsAsErrors() ?
+ DiagnosticIDs::Error :
+ DiagnosticIDs::Warning;
+ unsigned id = diagnostic().getDiagnosticIDs()->getCustomDiagID(level, err);
+ DiagnosticBuilder builder = diagnostic().Report(full, id);
+}
+
+bool ChromeClassTester::InBannedNamespace(const Decl* record) {
+ std::string n = GetNamespace(record);
+ if (!n.empty()) {
+ return std::find(banned_namespaces_.begin(), banned_namespaces_.end(), n)
+ != banned_namespaces_.end();
+ }
+
+ return false;
+}
+
+std::string ChromeClassTester::GetNamespace(const Decl* record) {
+ return GetNamespaceImpl(record->getDeclContext(), "");
+}
+
+bool ChromeClassTester::InImplementationFile(SourceLocation record_location) {
+ std::string filename;
+ if (!GetFilename(record_location, &filename))
+ return false;
+
+ if (ends_with(filename, ".cc") || ends_with(filename, ".cpp") ||
+ ends_with(filename, ".mm")) {
+ return true;
+ }
+
+ return false;
+}
+
+void ChromeClassTester::BuildBannedLists() {
+ banned_namespaces_.push_back("std");
+ banned_namespaces_.push_back("__gnu_cxx");
+
+ banned_namespaces_.push_back("blink");
+ banned_namespaces_.push_back("WTF");
+
+ banned_directories_.push_back("/third_party/");
+ banned_directories_.push_back("/native_client/");
+ banned_directories_.push_back("/breakpad/");
+ banned_directories_.push_back("/courgette/");
+ banned_directories_.push_back("/pdf/");
+ banned_directories_.push_back("/ppapi/");
+ banned_directories_.push_back("/usr/");
+ banned_directories_.push_back("/testing/");
+ banned_directories_.push_back("/v8/");
+ banned_directories_.push_back("/dart/");
+ banned_directories_.push_back("/sdch/");
+ banned_directories_.push_back("/icu4c/");
+ banned_directories_.push_back("/frameworks/");
+
+ // Don't check autogenerated headers.
+ // Make puts them below $(builddir_name)/.../gen and geni.
+ // Ninja puts them below OUTPUT_DIR/.../gen
+ // Xcode has a fixed output directory for everything.
+ banned_directories_.push_back("/gen/");
+ banned_directories_.push_back("/geni/");
+ banned_directories_.push_back("/xcodebuild/");
+
+ // You are standing in a maze of twisty dependencies, all resolved by
+ // putting everything in the header.
+ banned_directories_.push_back("/automation/");
+
+ // Don't check system headers.
+ banned_directories_.push_back("/Developer/");
+
+ // Used in really low-level threading code that probably shouldn't be
+ // out-of-lined.
+ ignored_record_names_.insert("ThreadLocalBoolean");
+
+ // A complicated pickle derived struct that is all packed integers.
+ ignored_record_names_.insert("Header");
+
+ // Part of the GPU system that relies on multiple-inclusion header
+ // weirdness. We are never going to get this right.
+ ignored_record_names_.insert("Validators");
+
+ // Has a UNIT_TEST only constructor. Isn't *terribly* complex...
+ ignored_record_names_.insert("AutocompleteController");
+ ignored_record_names_.insert("HistoryURLProvider");
+
+ // Because of Chrome Frame.
+ ignored_record_names_.insert("ReliabilityTestSuite");
+
+ // Used over in the net unittests. A large enough bundle of integers with 1
+ // non-pod class member. Probably harmless.
+ ignored_record_names_.insert("MockTransaction");
+
+ // Enum type with _LAST members where _LAST doesn't mean last enum value.
+ ignored_record_names_.insert("ServerFieldType");
+
+ // Used heavily in ui_unittests and once in views_unittests. Fixing this
+ // isn't worth the overhead of an additional library.
+ ignored_record_names_.insert("TestAnimationDelegate");
+
+ // Part of our public interface that nacl and friends use. (Arguably, this
+ // should mean that this is a higher priority but fixing this looks hard.)
+ ignored_record_names_.insert("PluginVersionInfo");
+
+ // Measured performance improvement on cc_perftests. See
+ // https://codereview.chromium.org/11299290/
+ ignored_record_names_.insert("QuadF");
+
+ // Enum type with _LAST members where _LAST doesn't mean last enum value.
+ ignored_record_names_.insert("ViewID");
+}
+
+std::string ChromeClassTester::GetNamespaceImpl(const DeclContext* context,
+ const std::string& candidate) {
+ switch (context->getDeclKind()) {
+ case Decl::TranslationUnit: {
+ return candidate;
+ }
+ case Decl::Namespace: {
+ const NamespaceDecl* decl = dyn_cast<NamespaceDecl>(context);
+ std::string name_str;
+ llvm::raw_string_ostream OS(name_str);
+ if (decl->isAnonymousNamespace())
+ OS << "<anonymous namespace>";
+ else
+ OS << *decl;
+ return GetNamespaceImpl(context->getParent(),
+ OS.str());
+ }
+ default: {
+ return GetNamespaceImpl(context->getParent(), candidate);
+ }
+ }
+}
+
+bool ChromeClassTester::InBannedDirectory(SourceLocation loc) {
+ std::string filename;
+ if (!GetFilename(loc, &filename)) {
+ // If the filename cannot be determined, simply treat this as a banned
+ // location, instead of going through the full lookup process.
+ return true;
+ }
+
+ // We need to special-case scratch space, which is where clang does its
+ // macro expansion. We explicitly want to allow people to do otherwise bad
+ // things through macros that were defined due to third party libraries.
+ if (filename == "<scratch space>")
+ return true;
+
+ // Don't complain about autogenerated protobuf files.
+ if (ends_with(filename, ".pb.h")) {
+ return true;
+ }
+
+ // We need to munge the paths so that they are relative to the repository
+ // srcroot. We first resolve the symlinktastic relative path and then
+ // remove our known srcroot from it if needed.
+ char resolvedPath[MAXPATHLEN];
+ if (realpath(filename.c_str(), resolvedPath)) {
+ filename = resolvedPath;
+ }
+
+ // On Linux, Chrome is often checked out to /usr/local/google. Due to the
+ // "/usr/" entry in banned_directories_, all diagnostics would be suppressed
+ // in that case. As a workaround, strip that prefix.
+ filename = lstrip(filename, "/usr/local/google");
+
+ for (size_t i = 0; i < banned_directories_.size(); ++i) {
+ // If any of the banned directories occur as a component in filename,
+ // this file is rejected.
+ const std::string& banned_dir = banned_directories_[i];
+ assert(banned_dir.front() == '/' && "Banned dir must start with '/'");
+ assert(banned_dir.back() == '/' && "Banned dir must end with '/'");
+
+ if (filename.find(banned_dir) != std::string::npos)
+ return true;
+ }
+
+ return false;
+}
+
+bool ChromeClassTester::IsIgnoredType(const std::string& base_name) {
+ return ignored_record_names_.find(base_name) != ignored_record_names_.end();
+}
+
+bool ChromeClassTester::GetFilename(SourceLocation loc,
+ std::string* filename) {
+ const SourceManager& source_manager = instance_.getSourceManager();
+ SourceLocation spelling_location = source_manager.getSpellingLoc(loc);
+ PresumedLoc ploc = source_manager.getPresumedLoc(spelling_location);
+ if (ploc.isInvalid()) {
+ // If we're in an invalid location, we're looking at things that aren't
+ // actually stated in the source.
+ return false;
+ }
+
+ *filename = ploc.getFilename();
+ return true;
+}
diff --git a/tools/clang/plugins/ChromeClassTester.h b/tools/clang/plugins/ChromeClassTester.h
new file mode 100644
index 0000000..6bd19a6
--- /dev/null
+++ b/tools/clang/plugins/ChromeClassTester.h
@@ -0,0 +1,90 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_CLANG_PLUGINS_CHROMECLASSTESTER_H_
+#define TOOLS_CLANG_PLUGINS_CHROMECLASSTESTER_H_
+
+#include <set>
+#include <vector>
+
+#include "clang/AST/ASTConsumer.h"
+#include "clang/AST/TypeLoc.h"
+#include "clang/Frontend/CompilerInstance.h"
+
+// A class on top of ASTConsumer that forwards classes defined in Chromium
+// headers to subclasses which implement CheckChromeClass().
+class ChromeClassTester : public clang::ASTConsumer {
+ public:
+ explicit ChromeClassTester(clang::CompilerInstance& instance);
+ virtual ~ChromeClassTester();
+
+ // clang::ASTConsumer:
+ virtual void HandleTagDeclDefinition(clang::TagDecl* tag);
+ virtual bool HandleTopLevelDecl(clang::DeclGroupRef group_ref);
+
+ protected:
+ clang::CompilerInstance& instance() { return instance_; }
+ clang::DiagnosticsEngine& diagnostic() { return diagnostic_; }
+
+ // Emits a simple warning; this shouldn't be used if you require printf-style
+ // printing.
+ void emitWarning(clang::SourceLocation loc, const char* error);
+
+ // Utility method for subclasses to check if this class is in a banned
+ // namespace.
+ bool InBannedNamespace(const clang::Decl* record);
+
+ // Utility method for subclasses to determine the namespace of the
+ // specified record, if any. Unnamed namespaces will be identified as
+ // "<anonymous namespace>".
+ std::string GetNamespace(const clang::Decl* record);
+
+ // Utility method for subclasses to check if this class is within an
+ // implementation (.cc, .cpp, .mm) file.
+ bool InImplementationFile(clang::SourceLocation location);
+
+ private:
+ void BuildBannedLists();
+
+ void CheckTag(clang::TagDecl*);
+
+ // Filtered versions of tags that are only called with things defined in
+ // chrome header files.
+ virtual void CheckChromeClass(clang::SourceLocation record_location,
+ clang::CXXRecordDecl* record) = 0;
+
+ // Filtered versions of enum type that are only called with things defined
+ // in chrome header files.
+ virtual void CheckChromeEnum(clang::SourceLocation enum_location,
+ clang::EnumDecl* enum_decl) {
+ }
+
+ // Utility methods used for filtering out non-chrome classes (and ones we
+ // deliberately ignore) in HandleTagDeclDefinition().
+ std::string GetNamespaceImpl(const clang::DeclContext* context,
+ const std::string& candidate);
+ bool InBannedDirectory(clang::SourceLocation loc);
+ bool IsIgnoredType(const std::string& base_name);
+
+ // Attempts to determine the filename for the given SourceLocation.
+ // Returns false if the filename could not be determined.
+ bool GetFilename(clang::SourceLocation loc, std::string* filename);
+
+ clang::CompilerInstance& instance_;
+ clang::DiagnosticsEngine& diagnostic_;
+
+ // List of banned namespaces.
+ std::vector<std::string> banned_namespaces_;
+
+ // List of banned directories.
+ std::vector<std::string> banned_directories_;
+
+ // List of types that we don't check.
+ std::set<std::string> ignored_record_names_;
+
+ // List of decls to check once the current top-level decl is parsed.
+ std::vector<clang::TagDecl*> pending_class_decls_;
+};
+
+#endif // TOOLS_CLANG_PLUGINS_CHROMECLASSTESTER_H_
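For context, a minimal sketch of how a checker plugs into this interface; the class name and warning text below are hypothetical, and only the overridden hook and emitWarning() come from the header above (illustration only, not part of the patch):

// Sketch only: a subclass that warns about one made-up pattern.
class VeryLongClassNameChecker : public ChromeClassTester {
 public:
  explicit VeryLongClassNameChecker(clang::CompilerInstance& instance)
      : ChromeClassTester(instance) {}

 private:
  // Called only for classes that survive ChromeClassTester's filtering
  // (not in a banned namespace or directory, not an ignored type, etc.).
  virtual void CheckChromeClass(clang::SourceLocation record_location,
                                clang::CXXRecordDecl* record) {
    if (record->getNameAsString().size() > 80)
      emitWarning(record_location,
                  "Hypothetical check: class name is very long.");
  }
};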
diff --git a/tools/clang/plugins/FindBadConstructsAction.cpp b/tools/clang/plugins/FindBadConstructsAction.cpp
new file mode 100644
index 0000000..ddaf419
--- /dev/null
+++ b/tools/clang/plugins/FindBadConstructsAction.cpp
@@ -0,0 +1,54 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "FindBadConstructsAction.h"
+
+#include "clang/Frontend/FrontendPluginRegistry.h"
+
+#include "FindBadConstructsConsumer.h"
+
+using namespace clang;
+
+namespace chrome_checker {
+
+FindBadConstructsAction::FindBadConstructsAction() {
+}
+
+std::unique_ptr<ASTConsumer> FindBadConstructsAction::CreateASTConsumer(
+ CompilerInstance& instance,
+ llvm::StringRef ref) {
+ return llvm::make_unique<FindBadConstructsConsumer>(instance, options_);
+}
+
+bool FindBadConstructsAction::ParseArgs(const CompilerInstance& instance,
+ const std::vector<std::string>& args) {
+ bool parsed = true;
+
+ for (size_t i = 0; i < args.size() && parsed; ++i) {
+ if (args[i] == "check-base-classes") {
+ // TODO(rsleevi): Remove this once http://crbug.com/123295 is fixed.
+ options_.check_base_classes = true;
+ } else if (args[i] == "check-weak-ptr-factory-order") {
+ // TODO(dmichael): Remove this once http://crbug.com/303818 is fixed.
+ options_.check_weak_ptr_factory_order = true;
+ } else if (args[i] == "check-enum-last-value") {
+ // TODO(tsepez): Enable this by default once http://crbug.com/356815
+ // and http://crbug.com/356816 are fixed.
+ options_.check_enum_last_value = true;
+ } else if (args[i] == "strict-virtual-specifiers") {
+ options_.strict_virtual_specifiers = true;
+ } else {
+ parsed = false;
+ llvm::errs() << "Unknown clang plugin argument: " << args[i] << "\n";
+ }
+ }
+
+ return parsed;
+}
+
+} // namespace chrome_checker
+
+static FrontendPluginRegistry::Add<chrome_checker::FindBadConstructsAction> X(
+ "find-bad-constructs",
+ "Finds bad C++ constructs");
diff --git a/tools/clang/plugins/FindBadConstructsAction.h b/tools/clang/plugins/FindBadConstructsAction.h
new file mode 100644
index 0000000..383db84
--- /dev/null
+++ b/tools/clang/plugins/FindBadConstructsAction.h
@@ -0,0 +1,32 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_CLANG_PLUGINS_FINDBADCONSTRUCTIONS_ACTION_H_
+#define TOOLS_CLANG_PLUGINS_FINDBADCONSTRUCTIONS_ACTION_H_
+
+#include "clang/Frontend/FrontendAction.h"
+
+#include "Options.h"
+
+namespace chrome_checker {
+
+class FindBadConstructsAction : public clang::PluginASTAction {
+ public:
+ FindBadConstructsAction();
+
+ protected:
+ // Overridden from PluginASTAction:
+ virtual std::unique_ptr<clang::ASTConsumer> CreateASTConsumer(
+ clang::CompilerInstance& instance,
+ llvm::StringRef ref);
+ virtual bool ParseArgs(const clang::CompilerInstance& instance,
+ const std::vector<std::string>& args);
+
+ private:
+ Options options_;
+};
+
+} // namespace chrome_checker
+
+#endif // TOOLS_CLANG_PLUGINS_FINDBADCONSTRUCTIONS_ACTION_H_
diff --git a/tools/clang/plugins/FindBadConstructsConsumer.cpp b/tools/clang/plugins/FindBadConstructsConsumer.cpp
new file mode 100644
index 0000000..3ff133d
--- /dev/null
+++ b/tools/clang/plugins/FindBadConstructsConsumer.cpp
@@ -0,0 +1,743 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "FindBadConstructsConsumer.h"
+
+#include "clang/Frontend/CompilerInstance.h"
+#include "clang/AST/Attr.h"
+#include "clang/Lex/Lexer.h"
+#include "llvm/Support/raw_ostream.h"
+
+using namespace clang;
+
+namespace chrome_checker {
+
+namespace {
+
+const char kMethodRequiresOverride[] =
+ "[chromium-style] Overriding method must be marked with 'override' or "
+ "'final'.";
+const char kRedundantVirtualSpecifier[] =
+ "[chromium-style] %0 is redundant; %1 implies %0.";
+// http://llvm.org/bugs/show_bug.cgi?id=21051 has been filed to make this a
+// Clang warning.
+const char kBaseMethodVirtualAndFinal[] =
+ "[chromium-style] The virtual method does not override anything and is "
+ "final; consider making it non-virtual.";
+const char kNoExplicitDtor[] =
+ "[chromium-style] Classes that are ref-counted should have explicit "
+ "destructors that are declared protected or private.";
+const char kPublicDtor[] =
+ "[chromium-style] Classes that are ref-counted should have "
+ "destructors that are declared protected or private.";
+const char kProtectedNonVirtualDtor[] =
+ "[chromium-style] Classes that are ref-counted and have non-private "
+ "destructors should declare their destructor virtual.";
+const char kWeakPtrFactoryOrder[] =
+ "[chromium-style] WeakPtrFactory members which refer to their outer class "
+ "must be the last member in the outer class definition.";
+const char kBadLastEnumValue[] =
+ "[chromium-style] _LAST/Last constants of enum types must have the maximal "
+ "value for any constant of that type.";
+const char kNoteInheritance[] = "[chromium-style] %0 inherits from %1 here";
+const char kNoteImplicitDtor[] =
+ "[chromium-style] No explicit destructor for %0 defined";
+const char kNotePublicDtor[] =
+ "[chromium-style] Public destructor declared here";
+const char kNoteProtectedNonVirtualDtor[] =
+ "[chromium-style] Protected non-virtual destructor declared here";
+
+bool TypeHasNonTrivialDtor(const Type* type) {
+ if (const CXXRecordDecl* cxx_r = type->getPointeeCXXRecordDecl())
+ return !cxx_r->hasTrivialDestructor();
+
+ return false;
+}
+
+// Returns the underlying Type for |type| by expanding typedefs and removing
+// any namespace qualifiers. This is similar to desugaring, except that for
+// ElaboratedTypes, desugar will unwrap too much.
+const Type* UnwrapType(const Type* type) {
+ if (const ElaboratedType* elaborated = dyn_cast<ElaboratedType>(type))
+ return UnwrapType(elaborated->getNamedType().getTypePtr());
+ if (const TypedefType* typedefed = dyn_cast<TypedefType>(type))
+ return UnwrapType(typedefed->desugar().getTypePtr());
+ return type;
+}
+
+FixItHint FixItRemovalForVirtual(const SourceManager& manager,
+ const CXXMethodDecl* method) {
+ // Unfortunately, there doesn't seem to be a good way to determine the
+ // location of the 'virtual' keyword. It's available in Declarator, but that
+ // isn't accessible from the AST. So instead, make an educated guess that the
+ // first token is probably the virtual keyword. Strictly speaking, this
+ // doesn't have to be true, but it probably will be.
+ // TODO(dcheng): Add a warning to force virtual to always appear first ;-)
+ SourceRange range(method->getLocStart());
+ // Get the spelling loc just in case it was expanded from a macro.
+ SourceRange spelling_range(manager.getSpellingLoc(range.getBegin()));
+ // Sanity check that the text looks like virtual.
+ StringRef text = clang::Lexer::getSourceText(
+ CharSourceRange::getTokenRange(spelling_range), manager, LangOptions());
+ if (text.trim() != "virtual")
+ return FixItHint();
+ return FixItHint::CreateRemoval(range);
+}
+
+} // namespace
+
+FindBadConstructsConsumer::FindBadConstructsConsumer(CompilerInstance& instance,
+ const Options& options)
+ : ChromeClassTester(instance), options_(options) {
+ // Messages for virtual method specifiers.
+ diag_method_requires_override_ =
+ diagnostic().getCustomDiagID(getErrorLevel(), kMethodRequiresOverride);
+ diag_redundant_virtual_specifier_ =
+ diagnostic().getCustomDiagID(getErrorLevel(), kRedundantVirtualSpecifier);
+ diag_base_method_virtual_and_final_ =
+ diagnostic().getCustomDiagID(getErrorLevel(), kBaseMethodVirtualAndFinal);
+
+ // Messages for destructors.
+ diag_no_explicit_dtor_ =
+ diagnostic().getCustomDiagID(getErrorLevel(), kNoExplicitDtor);
+ diag_public_dtor_ =
+ diagnostic().getCustomDiagID(getErrorLevel(), kPublicDtor);
+ diag_protected_non_virtual_dtor_ =
+ diagnostic().getCustomDiagID(getErrorLevel(), kProtectedNonVirtualDtor);
+
+ // Miscellaneous messages.
+ diag_weak_ptr_factory_order_ =
+ diagnostic().getCustomDiagID(getErrorLevel(), kWeakPtrFactoryOrder);
+ diag_bad_enum_last_value_ =
+ diagnostic().getCustomDiagID(getErrorLevel(), kBadLastEnumValue);
+
+ // Registers notes to make it easier to interpret warnings.
+ diag_note_inheritance_ =
+ diagnostic().getCustomDiagID(DiagnosticsEngine::Note, kNoteInheritance);
+ diag_note_implicit_dtor_ =
+ diagnostic().getCustomDiagID(DiagnosticsEngine::Note, kNoteImplicitDtor);
+ diag_note_public_dtor_ =
+ diagnostic().getCustomDiagID(DiagnosticsEngine::Note, kNotePublicDtor);
+ diag_note_protected_non_virtual_dtor_ = diagnostic().getCustomDiagID(
+ DiagnosticsEngine::Note, kNoteProtectedNonVirtualDtor);
+}
+
+void FindBadConstructsConsumer::CheckChromeClass(SourceLocation record_location,
+ CXXRecordDecl* record) {
+ bool implementation_file = InImplementationFile(record_location);
+
+ if (!implementation_file) {
+ // Only check for "heavy" constructors/destructors in header files;
+ // within implementation files, there is no performance cost.
+ CheckCtorDtorWeight(record_location, record);
+ }
+
+ bool warn_on_inline_bodies = !implementation_file;
+
+ // Check that all virtual methods are annotated with override or final.
+ CheckVirtualMethods(record_location, record, warn_on_inline_bodies);
+
+ CheckRefCountedDtors(record_location, record);
+
+ if (options_.check_weak_ptr_factory_order)
+ CheckWeakPtrFactoryMembers(record_location, record);
+}
+
+void FindBadConstructsConsumer::CheckChromeEnum(SourceLocation enum_location,
+ EnumDecl* enum_decl) {
+ if (!options_.check_enum_last_value)
+ return;
+
+ bool got_one = false;
+ bool is_signed = false;
+ llvm::APSInt max_so_far;
+ EnumDecl::enumerator_iterator iter;
+ for (iter = enum_decl->enumerator_begin();
+ iter != enum_decl->enumerator_end();
+ ++iter) {
+ llvm::APSInt current_value = iter->getInitVal();
+ if (!got_one) {
+ max_so_far = current_value;
+ is_signed = current_value.isSigned();
+ got_one = true;
+ } else {
+ if (is_signed != current_value.isSigned()) {
+ // This only happens in some cases when compiling C (not C++) files,
+ // so it is OK to bail out here.
+ return;
+ }
+ if (current_value > max_so_far)
+ max_so_far = current_value;
+ }
+ }
+ for (iter = enum_decl->enumerator_begin();
+ iter != enum_decl->enumerator_end();
+ ++iter) {
+ std::string name = iter->getNameAsString();
+ if (((name.size() > 4 && name.compare(name.size() - 4, 4, "Last") == 0) ||
+ (name.size() > 5 && name.compare(name.size() - 5, 5, "_LAST") == 0)) &&
+ iter->getInitVal() < max_so_far) {
+ diagnostic().Report(iter->getLocation(), diag_bad_enum_last_value_);
+ }
+ }
+}
+
+void FindBadConstructsConsumer::CheckCtorDtorWeight(
+ SourceLocation record_location,
+ CXXRecordDecl* record) {
+ // We don't handle anonymous structs. If this record doesn't have a
+ // name, it's of the form:
+ //
+ // struct {
+ // ...
+ // } name_;
+ if (record->getIdentifier() == NULL)
+ return;
+
+ // Count the number of templated base classes as a feature of whether the
+ // destructor can be inlined.
+ int templated_base_classes = 0;
+ for (CXXRecordDecl::base_class_const_iterator it = record->bases_begin();
+ it != record->bases_end();
+ ++it) {
+ if (it->getTypeSourceInfo()->getTypeLoc().getTypeLocClass() ==
+ TypeLoc::TemplateSpecialization) {
+ ++templated_base_classes;
+ }
+ }
+
+ // Count the number of trivial and non-trivial member variables.
+ int trivial_member = 0;
+ int non_trivial_member = 0;
+ int templated_non_trivial_member = 0;
+ for (RecordDecl::field_iterator it = record->field_begin();
+ it != record->field_end();
+ ++it) {
+ CountType(it->getType().getTypePtr(),
+ &trivial_member,
+ &non_trivial_member,
+ &templated_non_trivial_member);
+ }
+
+ // Check to see if we need to ban inlined/synthesized constructors. Note
+ // that the cutoffs here are kind of arbitrary. Scores of 10 or more warn.
+ int dtor_score = 0;
+ // Deriving from a templated base class shouldn't be enough to trigger
+ // the ctor warning, but if you do *anything* else, it should.
+ //
+ // TODO(erg): This is motivated by templated base classes that don't have
+ // any data members. Somehow detect when templated base classes have data
+ // members and treat them differently.
+ dtor_score += templated_base_classes * 9;
+ // Instantiating a template is an insta-hit.
+ dtor_score += templated_non_trivial_member * 10;
+ // The fourth normal class member should trigger the warning.
+ dtor_score += non_trivial_member * 3;
+
+ int ctor_score = dtor_score;
+ // You should be able to have 9 ints before we warn you.
+ ctor_score += trivial_member;
+
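+  // Worked examples of the scoring below (hypothetical members, for
+  // illustration only):
+  //  - two std::vector<> members: templated_non_trivial_member == 2, so
+  //    dtor_score == ctor_score == 20 and both warnings can fire;
+  //  - nine plain ints: trivial_member == 9, ctor_score == 9, no warning;
+  //  - a single std::string member: basic_string is whitelisted in CountType,
+  //    so non_trivial_member == 1 and dtor_score == 3, no warning.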
+ if (ctor_score >= 10) {
+ if (!record->hasUserDeclaredConstructor()) {
+ emitWarning(record_location,
+ "Complex class/struct needs an explicit out-of-line "
+ "constructor.");
+ } else {
+ // Iterate across all the constructors in this file and yell if we
+ // find one that tries to be inline.
+ for (CXXRecordDecl::ctor_iterator it = record->ctor_begin();
+ it != record->ctor_end();
+ ++it) {
+ if (it->hasInlineBody()) {
+ if (it->isCopyConstructor() &&
+ !record->hasUserDeclaredCopyConstructor()) {
+ emitWarning(record_location,
+ "Complex class/struct needs an explicit out-of-line "
+ "copy constructor.");
+ } else {
+ emitWarning(it->getInnerLocStart(),
+ "Complex constructor has an inlined body.");
+ }
+ }
+ }
+ }
+ }
+
+ // The destructor side is equivalent except that we don't check for
+ // trivial members; 20 ints don't need a destructor.
+ if (dtor_score >= 10 && !record->hasTrivialDestructor()) {
+ if (!record->hasUserDeclaredDestructor()) {
+ emitWarning(record_location,
+ "Complex class/struct needs an explicit out-of-line "
+ "destructor.");
+ } else if (CXXDestructorDecl* dtor = record->getDestructor()) {
+ if (dtor->hasInlineBody()) {
+ emitWarning(dtor->getInnerLocStart(),
+ "Complex destructor has an inline body.");
+ }
+ }
+ }
+}
+
+bool FindBadConstructsConsumer::InTestingNamespace(const Decl* record) {
+ return GetNamespace(record).find("testing") != std::string::npos;
+}
+
+bool FindBadConstructsConsumer::IsMethodInBannedOrTestingNamespace(
+ const CXXMethodDecl* method) {
+ if (InBannedNamespace(method))
+ return true;
+ for (CXXMethodDecl::method_iterator i = method->begin_overridden_methods();
+ i != method->end_overridden_methods();
+ ++i) {
+ const CXXMethodDecl* overridden = *i;
+ if (IsMethodInBannedOrTestingNamespace(overridden) ||
+ InTestingNamespace(overridden)) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+// Checks that virtual methods are correctly annotated, and have no body in a
+// header file.
+void FindBadConstructsConsumer::CheckVirtualMethods(
+ SourceLocation record_location,
+ CXXRecordDecl* record,
+ bool warn_on_inline_bodies) {
+  // Gmock objects trigger these warnings for each MOCK_BLAH() macro used, so
+  // we use a trick to get around that: if a class has member variables whose
+  // types are in the "testing" namespace (which is how gmock works behind the
+  // scenes), it is very likely that we do not care about these warnings, so
+  // skip the class.
+ for (CXXRecordDecl::field_iterator it = record->field_begin();
+ it != record->field_end();
+ ++it) {
+ CXXRecordDecl* record_type = it->getTypeSourceInfo()
+ ->getTypeLoc()
+ .getTypePtr()
+ ->getAsCXXRecordDecl();
+ if (record_type) {
+ if (InTestingNamespace(record_type)) {
+ return;
+ }
+ }
+ }
+
+ for (CXXRecordDecl::method_iterator it = record->method_begin();
+ it != record->method_end();
+ ++it) {
+ if (it->isCopyAssignmentOperator() || isa<CXXConstructorDecl>(*it)) {
+ // Ignore constructors and assignment operators.
+ } else if (isa<CXXDestructorDecl>(*it) &&
+ !record->hasUserDeclaredDestructor()) {
+ // Ignore non-user-declared destructors.
+ } else if (!it->isVirtual()) {
+ continue;
+ } else {
+ CheckVirtualSpecifiers(*it);
+ if (warn_on_inline_bodies)
+ CheckVirtualBodies(*it);
+ }
+ }
+}
+
+// Makes sure that virtual methods use the most appropriate specifier. If a
+// virtual method overrides a method from a base class, only the override
+// specifier should be used. If the method should not be overridden by derived
+// classes, only the final specifier should be used.
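+//
+// For illustration (hypothetical declarations): given a base class declaring
+// `virtual void Foo();`, a derived-class declaration `void Foo();` is flagged
+// as missing `override`, while `virtual void Foo() override;` is flagged as
+// redundant (with a fix-it to drop `virtual`) when strict_virtual_specifiers
+// is enabled.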
+void FindBadConstructsConsumer::CheckVirtualSpecifiers(
+ const CXXMethodDecl* method) {
+ bool is_override = method->size_overridden_methods() > 0;
+ bool has_virtual = method->isVirtualAsWritten();
+ OverrideAttr* override_attr = method->getAttr<OverrideAttr>();
+ FinalAttr* final_attr = method->getAttr<FinalAttr>();
+
+ if (method->isPure())
+ return;
+
+ if (IsMethodInBannedOrTestingNamespace(method))
+ return;
+
+ if (isa<CXXDestructorDecl>(method) && !options_.strict_virtual_specifiers)
+ return;
+
+ SourceManager& manager = instance().getSourceManager();
+
+ // Complain if a method is annotated virtual && (override || final).
+ if (has_virtual && (override_attr || final_attr) &&
+ options_.strict_virtual_specifiers) {
+ diagnostic().Report(method->getLocStart(),
+ diag_redundant_virtual_specifier_)
+ << "'virtual'"
+ << (override_attr ? static_cast<Attr*>(override_attr) : final_attr)
+ << FixItRemovalForVirtual(manager, method);
+ }
+
+ // Complain if a method is an override and is not annotated with override or
+ // final.
+ if (is_override && !override_attr && !final_attr) {
+ SourceRange type_info_range =
+ method->getTypeSourceInfo()->getTypeLoc().getSourceRange();
+ FullSourceLoc loc(type_info_range.getBegin(), manager);
+
+ // Build the FixIt insertion point after the end of the method definition,
+ // including any const-qualifiers and attributes, and before the opening
+    // of the left curly brace (if inline) or the semicolon (if a declaration).
+ SourceLocation spelling_end =
+ manager.getSpellingLoc(type_info_range.getEnd());
+ if (spelling_end.isValid()) {
+ SourceLocation token_end =
+ Lexer::getLocForEndOfToken(spelling_end, 0, manager, LangOptions());
+ diagnostic().Report(token_end, diag_method_requires_override_)
+ << FixItHint::CreateInsertion(token_end, " override");
+ } else {
+ diagnostic().Report(loc, diag_method_requires_override_);
+ }
+ }
+
+ if (final_attr && override_attr && options_.strict_virtual_specifiers) {
+ diagnostic().Report(override_attr->getLocation(),
+ diag_redundant_virtual_specifier_)
+ << override_attr << final_attr
+ << FixItHint::CreateRemoval(override_attr->getRange());
+ }
+
+ if (final_attr && !is_override && options_.strict_virtual_specifiers) {
+ diagnostic().Report(method->getLocStart(),
+ diag_base_method_virtual_and_final_)
+ << FixItRemovalForVirtual(manager, method)
+ << FixItHint::CreateRemoval(final_attr->getRange());
+ }
+}
+
+void FindBadConstructsConsumer::CheckVirtualBodies(
+ const CXXMethodDecl* method) {
+ // Virtual methods should not have inline definitions beyond "{}". This
+ // only matters for header files.
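+  // For illustration (hypothetical methods): `virtual void Foo() {}` is left
+  // alone, while `virtual void Foo() { DoWork(); }` declared in a header
+  // triggers the warning below.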
+ if (method->hasBody() && method->hasInlineBody()) {
+ if (CompoundStmt* cs = dyn_cast<CompoundStmt>(method->getBody())) {
+ if (cs->size()) {
+ emitWarning(cs->getLBracLoc(),
+ "virtual methods with non-empty bodies shouldn't be "
+ "declared inline.");
+ }
+ }
+ }
+}
+
+void FindBadConstructsConsumer::CountType(const Type* type,
+ int* trivial_member,
+ int* non_trivial_member,
+ int* templated_non_trivial_member) {
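+  // Illustrative classification (hypothetical member types): `int` counts as
+  // trivial; `std::string` resolves to the whitelisted basic_string template
+  // and counts as non-trivial; `std::vector<int>` counts as templated
+  // non-trivial; typedefs and elaborated types are unwrapped and counted as
+  // their underlying type.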
+ switch (type->getTypeClass()) {
+ case Type::Record: {
+ // Simplifying; the whole class isn't trivial if the dtor is, but
+ // we use this as a signal about complexity.
+ if (TypeHasNonTrivialDtor(type))
+ (*trivial_member)++;
+ else
+ (*non_trivial_member)++;
+ break;
+ }
+ case Type::TemplateSpecialization: {
+ TemplateName name =
+ dyn_cast<TemplateSpecializationType>(type)->getTemplateName();
+ bool whitelisted_template = false;
+
+      // HACK: I'm at a loss about how to ask the syntax checker whether a
+      // template is extern'd or not. For the first pass here, just do naive
+      // string comparisons.
+ if (TemplateDecl* decl = name.getAsTemplateDecl()) {
+ std::string base_name = decl->getNameAsString();
+ if (base_name == "basic_string")
+ whitelisted_template = true;
+ }
+
+ if (whitelisted_template)
+ (*non_trivial_member)++;
+ else
+ (*templated_non_trivial_member)++;
+ break;
+ }
+ case Type::Elaborated: {
+ CountType(dyn_cast<ElaboratedType>(type)->getNamedType().getTypePtr(),
+ trivial_member,
+ non_trivial_member,
+ templated_non_trivial_member);
+ break;
+ }
+ case Type::Typedef: {
+ while (const TypedefType* TT = dyn_cast<TypedefType>(type)) {
+ type = TT->getDecl()->getUnderlyingType().getTypePtr();
+ }
+ CountType(type,
+ trivial_member,
+ non_trivial_member,
+ templated_non_trivial_member);
+ break;
+ }
+ default: {
+      // Simplifying assumption: anything we see that isn't one of the above
+      // is treated as a trivial, integer-like type.
+ (*trivial_member)++;
+ break;
+ }
+ }
+}
+
+// Check |record| for issues that are problematic for ref-counted types.
+// Note that |record| may not be a ref-counted type, but a base class for
+// a type that is.
+// If there is an issue, updates |loc| with the SourceLocation of the issue
+// and returns the corresponding RefcountIssue; otherwise returns None.
+FindBadConstructsConsumer::RefcountIssue
+FindBadConstructsConsumer::CheckRecordForRefcountIssue(
+ const CXXRecordDecl* record,
+ SourceLocation& loc) {
+ if (!record->hasUserDeclaredDestructor()) {
+ loc = record->getLocation();
+ return ImplicitDestructor;
+ }
+
+ if (CXXDestructorDecl* dtor = record->getDestructor()) {
+ if (dtor->getAccess() == AS_public) {
+ loc = dtor->getInnerLocStart();
+ return PublicDestructor;
+ }
+ }
+
+ return None;
+}
+
+// Returns the diagnostic level to use (warning or error), based on the
+// current handling of -Werror.
+DiagnosticsEngine::Level FindBadConstructsConsumer::getErrorLevel() {
+ return diagnostic().getWarningsAsErrors() ? DiagnosticsEngine::Error
+ : DiagnosticsEngine::Warning;
+}
+
+// Returns true if |base| specifies one of the Chromium reference counted
+// classes (base::RefCounted / base::RefCountedThreadSafe).
+bool FindBadConstructsConsumer::IsRefCountedCallback(
+ const CXXBaseSpecifier* base,
+ CXXBasePath& path,
+ void* user_data) {
+ FindBadConstructsConsumer* self =
+ static_cast<FindBadConstructsConsumer*>(user_data);
+
+ const TemplateSpecializationType* base_type =
+ dyn_cast<TemplateSpecializationType>(
+ UnwrapType(base->getType().getTypePtr()));
+ if (!base_type) {
+ // Base-most definition is not a template, so this cannot derive from
+    // base::RefCounted. However, it may still be possible to use it with a
+ // scoped_refptr<> and support ref-counting, so this is not a perfect
+ // guarantee of safety.
+ return false;
+ }
+
+ TemplateName name = base_type->getTemplateName();
+ if (TemplateDecl* decl = name.getAsTemplateDecl()) {
+ std::string base_name = decl->getNameAsString();
+
+ // Check for both base::RefCounted and base::RefCountedThreadSafe.
+ if (base_name.compare(0, 10, "RefCounted") == 0 &&
+ self->GetNamespace(decl) == "base") {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+// Returns true if |base| specifies a class that has a public destructor,
+// either explicitly or implicitly.
+bool FindBadConstructsConsumer::HasPublicDtorCallback(
+ const CXXBaseSpecifier* base,
+ CXXBasePath& path,
+ void* user_data) {
+ // Only examine paths that have public inheritance, as they are the
+ // only ones which will result in the destructor potentially being
+ // exposed. This check is largely redundant, as Chromium code should be
+ // exclusively using public inheritance.
+ if (path.Access != AS_public)
+ return false;
+
+ CXXRecordDecl* record =
+ dyn_cast<CXXRecordDecl>(base->getType()->getAs<RecordType>()->getDecl());
+ SourceLocation unused;
+ return None != CheckRecordForRefcountIssue(record, unused);
+}
+
+// Outputs a C++ inheritance chain as a diagnostic aid.
+void FindBadConstructsConsumer::PrintInheritanceChain(const CXXBasePath& path) {
+ for (CXXBasePath::const_iterator it = path.begin(); it != path.end(); ++it) {
+ diagnostic().Report(it->Base->getLocStart(), diag_note_inheritance_)
+ << it->Class << it->Base->getType();
+ }
+}
+
+unsigned FindBadConstructsConsumer::DiagnosticForIssue(RefcountIssue issue) {
+ switch (issue) {
+ case ImplicitDestructor:
+ return diag_no_explicit_dtor_;
+ case PublicDestructor:
+ return diag_public_dtor_;
+ case None:
+ assert(false && "Do not call DiagnosticForIssue with issue None");
+ return 0;
+ }
+ assert(false);
+ return 0;
+}
+
+// Check |record| to determine if it has any problematic refcounting
+// issues and, if so, print them as warnings/errors based on the current
+// value of getErrorLevel().
+//
+// If |record| is a C++ class, and if it inherits from one of the Chromium
+// ref-counting classes (base::RefCounted / base::RefCountedThreadSafe),
+// ensure that there are no public destructors in the class hierarchy. This
+// is to guard against accidentally stack-allocating a RefCounted class or
+// sticking it in a non-ref-counted container (like scoped_ptr<>).
+void FindBadConstructsConsumer::CheckRefCountedDtors(
+ SourceLocation record_location,
+ CXXRecordDecl* record) {
+ // Skip anonymous structs.
+ if (record->getIdentifier() == NULL)
+ return;
+
+ // Determine if the current type is even ref-counted.
+ CXXBasePaths refcounted_path;
+ if (!record->lookupInBases(&FindBadConstructsConsumer::IsRefCountedCallback,
+ this,
+ refcounted_path)) {
+ return; // Class does not derive from a ref-counted base class.
+ }
+
+ // Easy check: Check to see if the current type is problematic.
+ SourceLocation loc;
+ RefcountIssue issue = CheckRecordForRefcountIssue(record, loc);
+ if (issue != None) {
+ diagnostic().Report(loc, DiagnosticForIssue(issue));
+ PrintInheritanceChain(refcounted_path.front());
+ return;
+ }
+ if (CXXDestructorDecl* dtor =
+ refcounted_path.begin()->back().Class->getDestructor()) {
+ if (dtor->getAccess() == AS_protected && !dtor->isVirtual()) {
+ loc = dtor->getInnerLocStart();
+ diagnostic().Report(loc, diag_protected_non_virtual_dtor_);
+ return;
+ }
+ }
+
+ // Long check: Check all possible base classes for problematic
+ // destructors. This checks for situations involving multiple
+ // inheritance, where the ref-counted class may be implementing an
+ // interface that has a public or implicit destructor.
+ //
+ // struct SomeInterface {
+ // virtual void DoFoo();
+ // };
+ //
+ // struct RefCountedInterface
+ // : public base::RefCounted<RefCountedInterface>,
+ // public SomeInterface {
+ // private:
+  //     friend class base::RefCounted<RefCountedInterface>;
+ // virtual ~RefCountedInterface() {}
+ // };
+ //
+ // While RefCountedInterface is "safe", in that its destructor is
+  //   private, it's possible to write the following "unsafe" code:
+ // scoped_refptr<RefCountedInterface> some_class(
+ // new RefCountedInterface);
+ // // Calls SomeInterface::~SomeInterface(), which is unsafe.
+ // delete static_cast<SomeInterface*>(some_class.get());
+ if (!options_.check_base_classes)
+ return;
+
+ // Find all public destructors. This will record the class hierarchy
+ // that leads to the public destructor in |dtor_paths|.
+ CXXBasePaths dtor_paths;
+ if (!record->lookupInBases(&FindBadConstructsConsumer::HasPublicDtorCallback,
+ this,
+ dtor_paths)) {
+ return;
+ }
+
+ for (CXXBasePaths::const_paths_iterator it = dtor_paths.begin();
+ it != dtor_paths.end();
+ ++it) {
+ // The record with the problem will always be the last record
+ // in the path, since it is the record that stopped the search.
+ const CXXRecordDecl* problem_record = dyn_cast<CXXRecordDecl>(
+ it->back().Base->getType()->getAs<RecordType>()->getDecl());
+
+ issue = CheckRecordForRefcountIssue(problem_record, loc);
+
+ if (issue == ImplicitDestructor) {
+ diagnostic().Report(record_location, diag_no_explicit_dtor_);
+ PrintInheritanceChain(refcounted_path.front());
+ diagnostic().Report(loc, diag_note_implicit_dtor_) << problem_record;
+ PrintInheritanceChain(*it);
+ } else if (issue == PublicDestructor) {
+ diagnostic().Report(record_location, diag_public_dtor_);
+ PrintInheritanceChain(refcounted_path.front());
+ diagnostic().Report(loc, diag_note_public_dtor_);
+ PrintInheritanceChain(*it);
+ }
+ }
+}
+
+// Check for any problems with WeakPtrFactory class members. This currently
+// only checks that any WeakPtrFactory<T> member of T appears as the last
+// data member in T. We could consider checking for bad uses of
+// WeakPtrFactory to refer to other data members, but that would require
+// looking at the initializer list in constructors to see what the factory
+// points to.
+// Note, if we later add other unrelated checks of data members, we should
+// consider collapsing them in to one loop to avoid iterating over the data
+// members more than once.
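+//
+// For illustration (hypothetical class):
+//   class Foo {
+//     // ...other members...
+//     base::WeakPtrFactory<Foo> weak_factory_;  // Must be the last member.
+//   };
+// Declaring any member after |weak_factory_| triggers the warning.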
+void FindBadConstructsConsumer::CheckWeakPtrFactoryMembers(
+ SourceLocation record_location,
+ CXXRecordDecl* record) {
+ // Skip anonymous structs.
+ if (record->getIdentifier() == NULL)
+ return;
+
+ // Iterate through members of the class.
+ RecordDecl::field_iterator iter(record->field_begin()),
+ the_end(record->field_end());
+ SourceLocation weak_ptr_factory_location; // Invalid initially.
+ for (; iter != the_end; ++iter) {
+ // If we enter the loop but have already seen a matching WeakPtrFactory,
+ // it means there is at least one member after the factory.
+ if (weak_ptr_factory_location.isValid()) {
+ diagnostic().Report(weak_ptr_factory_location,
+ diag_weak_ptr_factory_order_);
+ }
+ const TemplateSpecializationType* template_spec_type =
+ iter->getType().getTypePtr()->getAs<TemplateSpecializationType>();
+ if (template_spec_type) {
+ const TemplateDecl* template_decl =
+ template_spec_type->getTemplateName().getAsTemplateDecl();
+ if (template_decl && template_spec_type->getNumArgs() >= 1) {
+ if (template_decl->getNameAsString().compare("WeakPtrFactory") == 0 &&
+ GetNamespace(template_decl) == "base") {
+ const TemplateArgument& arg = template_spec_type->getArg(0);
+ if (arg.getAsType().getTypePtr()->getAsCXXRecordDecl() ==
+ record->getTypeForDecl()->getAsCXXRecordDecl()) {
+ weak_ptr_factory_location = iter->getLocation();
+ }
+ }
+ }
+ }
+ }
+}
+
+} // namespace chrome_checker
diff --git a/tools/clang/plugins/FindBadConstructsConsumer.h b/tools/clang/plugins/FindBadConstructsConsumer.h
new file mode 100644
index 0000000..cf048ae
--- /dev/null
+++ b/tools/clang/plugins/FindBadConstructsConsumer.h
@@ -0,0 +1,99 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file defines checks for recurring problems in the Chromium C++ code.
+//
+// Checks that are implemented:
+// - Constructors/Destructors should not be inlined if they are of a complex
+// class type.
+// - Missing "virtual" keywords on methods that should be virtual.
+// - Non-annotated overriding virtual methods.
+// - Virtual methods with nonempty implementations in their headers.
+// - Classes that derive from base::RefCounted / base::RefCountedThreadSafe
+// should have protected or private destructors.
+// - WeakPtrFactory members that refer to their outer class should be the last
+// member.
+// - Enum types with an xxxx_LAST or xxxxLast constant must have that constant
+//   hold the maximal value for that type.
+
+#include "clang/AST/AST.h"
+#include "clang/AST/ASTConsumer.h"
+#include "clang/AST/Attr.h"
+#include "clang/AST/CXXInheritance.h"
+#include "clang/AST/TypeLoc.h"
+#include "clang/Basic/SourceManager.h"
+
+#include "ChromeClassTester.h"
+#include "Options.h"
+
+namespace chrome_checker {
+
+// Searches for constructs that we know we don't want in the Chromium code base.
+class FindBadConstructsConsumer : public ChromeClassTester {
+ public:
+ FindBadConstructsConsumer(clang::CompilerInstance& instance,
+ const Options& options);
+
+ // ChromeClassTester overrides:
+ void CheckChromeClass(clang::SourceLocation record_location,
+ clang::CXXRecordDecl* record) override;
+ void CheckChromeEnum(clang::SourceLocation enum_location,
+ clang::EnumDecl* enum_decl) override;
+
+ private:
+ // The type of problematic ref-counting pattern that was encountered.
+ enum RefcountIssue { None, ImplicitDestructor, PublicDestructor };
+
+ void CheckCtorDtorWeight(clang::SourceLocation record_location,
+ clang::CXXRecordDecl* record);
+
+ bool InTestingNamespace(const clang::Decl* record);
+ bool IsMethodInBannedOrTestingNamespace(const clang::CXXMethodDecl* method);
+
+ void CheckVirtualMethods(clang::SourceLocation record_location,
+ clang::CXXRecordDecl* record,
+ bool warn_on_inline_bodies);
+ void CheckVirtualSpecifiers(const clang::CXXMethodDecl* method);
+ void CheckVirtualBodies(const clang::CXXMethodDecl* method);
+
+ void CountType(const clang::Type* type,
+ int* trivial_member,
+ int* non_trivial_member,
+ int* templated_non_trivial_member);
+
+ static RefcountIssue CheckRecordForRefcountIssue(
+ const clang::CXXRecordDecl* record,
+ clang::SourceLocation& loc);
+ clang::DiagnosticsEngine::Level getErrorLevel();
+ static bool IsRefCountedCallback(const clang::CXXBaseSpecifier* base,
+ clang::CXXBasePath& path,
+ void* user_data);
+ static bool HasPublicDtorCallback(const clang::CXXBaseSpecifier* base,
+ clang::CXXBasePath& path,
+ void* user_data);
+ void PrintInheritanceChain(const clang::CXXBasePath& path);
+ unsigned DiagnosticForIssue(RefcountIssue issue);
+ void CheckRefCountedDtors(clang::SourceLocation record_location,
+ clang::CXXRecordDecl* record);
+
+ void CheckWeakPtrFactoryMembers(clang::SourceLocation record_location,
+ clang::CXXRecordDecl* record);
+
+ const Options options_;
+
+ unsigned diag_method_requires_override_;
+ unsigned diag_redundant_virtual_specifier_;
+ unsigned diag_base_method_virtual_and_final_;
+ unsigned diag_no_explicit_dtor_;
+ unsigned diag_public_dtor_;
+ unsigned diag_protected_non_virtual_dtor_;
+ unsigned diag_weak_ptr_factory_order_;
+ unsigned diag_bad_enum_last_value_;
+ unsigned diag_note_inheritance_;
+ unsigned diag_note_implicit_dtor_;
+ unsigned diag_note_public_dtor_;
+ unsigned diag_note_protected_non_virtual_dtor_;
+};
+
+} // namespace chrome_checker
diff --git a/tools/clang/plugins/OWNERS b/tools/clang/plugins/OWNERS
new file mode 100644
index 0000000..4733a4f
--- /dev/null
+++ b/tools/clang/plugins/OWNERS
@@ -0,0 +1 @@
+erg@chromium.org
diff --git a/tools/clang/plugins/Options.h b/tools/clang/plugins/Options.h
new file mode 100644
index 0000000..2cdeaf3
--- /dev/null
+++ b/tools/clang/plugins/Options.h
@@ -0,0 +1,25 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_CLANG_PLUGINS_OPTIONS_H_
+#define TOOLS_CLANG_PLUGINS_OPTIONS_H_
+
+namespace chrome_checker {
+
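+// Each flag below defaults to false and is typically switched on from the
+// compiler command line via a plugin argument; for example, the test .flags
+// files use
+//   -Xclang -plugin-arg-find-bad-constructs -Xclang check-base-classes
+// for check_base_classes (the other flags are presumably spelled the same
+// way, with dashes instead of underscores).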
+struct Options {
+ Options()
+ : check_base_classes(false),
+ check_weak_ptr_factory_order(false),
+ check_enum_last_value(false),
+ strict_virtual_specifiers(false) {}
+
+ bool check_base_classes;
+ bool check_weak_ptr_factory_order;
+ bool check_enum_last_value;
+ bool strict_virtual_specifiers;
+};
+
+} // namespace chrome_checker
+
+#endif // TOOLS_CLANG_PLUGINS_OPTIONS_H_
diff --git a/tools/clang/plugins/README.chromium b/tools/clang/plugins/README.chromium
new file mode 100644
index 0000000..a2ce0ff
--- /dev/null
+++ b/tools/clang/plugins/README.chromium
@@ -0,0 +1,4 @@
+Documentation for this code is:
+
+- http://code.google.com/p/chromium/wiki/Clang
+- http://code.google.com/p/chromium/wiki/WritingClangPlugins
diff --git a/tools/clang/plugins/tests/base_refcounted.cpp b/tools/clang/plugins/tests/base_refcounted.cpp
new file mode 100644
index 0000000..698bf7b
--- /dev/null
+++ b/tools/clang/plugins/tests/base_refcounted.cpp
@@ -0,0 +1,79 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base_refcounted.h"
+
+#include <cstddef>
+
+namespace {
+
+// Unsafe; should error.
+class AnonymousDerivedProtectedToPublicInImpl
+ : public ProtectedRefCountedVirtualDtorInHeader {
+ public:
+ AnonymousDerivedProtectedToPublicInImpl() {}
+ virtual ~AnonymousDerivedProtectedToPublicInImpl() {}
+};
+
+// Unsafe, but we should only warn on the base class.
+class AnonymousDerivedProtectedOnDerived
+ : public ProtectedRefCountedDtorInHeader {
+ protected:
+ ~AnonymousDerivedProtectedOnDerived() {}
+};
+
+} // namespace
+
+// Unsafe; should error.
+class PublicRefCountedDtorInImpl
+ : public base::RefCounted<PublicRefCountedDtorInImpl> {
+ public:
+ PublicRefCountedDtorInImpl() {}
+ ~PublicRefCountedDtorInImpl() {}
+
+ private:
+ friend class base::RefCounted<PublicRefCountedDtorInImpl>;
+};
+
+class Foo {
+ public:
+ class BarInterface {
+ protected:
+ virtual ~BarInterface() {}
+ };
+
+ typedef base::RefCounted<BarInterface> RefCountedBar;
+ typedef RefCountedBar AnotherTypedef;
+};
+
+class Baz {
+ public:
+ typedef typename Foo::AnotherTypedef MyLocalTypedef;
+};
+
+// Unsafe; should error.
+class UnsafeTypedefChainInImpl : public Baz::MyLocalTypedef {
+ public:
+ UnsafeTypedefChainInImpl() {}
+ ~UnsafeTypedefChainInImpl() {}
+};
+
+int main() {
+ PublicRefCountedDtorInHeader bad;
+ PublicRefCountedDtorInImpl also_bad;
+
+ ProtectedRefCountedDtorInHeader* even_badder = NULL;
+ PrivateRefCountedDtorInHeader* private_ok = NULL;
+
+ DerivedProtectedToPublicInHeader still_bad;
+ PublicRefCountedThreadSafeDtorInHeader another_bad_variation;
+ AnonymousDerivedProtectedToPublicInImpl and_this_is_bad_too;
+ ImplicitDerivedProtectedToPublicInHeader bad_yet_again;
+ UnsafeTypedefChainInImpl and_again_this_is_bad;
+
+ WebKitPublicDtorInHeader ignored;
+ WebKitDerivedPublicDtorInHeader still_ignored;
+
+ return 0;
+}
diff --git a/tools/clang/plugins/tests/base_refcounted.flags b/tools/clang/plugins/tests/base_refcounted.flags
new file mode 100644
index 0000000..5cf5d10
--- /dev/null
+++ b/tools/clang/plugins/tests/base_refcounted.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-find-bad-constructs -Xclang check-base-classes
diff --git a/tools/clang/plugins/tests/base_refcounted.h b/tools/clang/plugins/tests/base_refcounted.h
new file mode 100644
index 0000000..4b4077c
--- /dev/null
+++ b/tools/clang/plugins/tests/base_refcounted.h
@@ -0,0 +1,223 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BASE_REFCOUNTED_H_
+#define BASE_REFCOUNTED_H_
+
+namespace base {
+
+template <typename T>
+class RefCounted {
+ public:
+ RefCounted() {}
+ protected:
+ ~RefCounted() {}
+};
+
+template <typename T>
+class RefCountedThreadSafe {
+ public:
+ RefCountedThreadSafe() {}
+ protected:
+ ~RefCountedThreadSafe() {}
+};
+
+} // namespace base
+
+// Ignore classes whose inheritance tree ends in WebKit's RefCounted base
+// class. Though prone to error, this pattern is very prevalent in WebKit
+// code, so do not issue any warnings.
+namespace WebKit {
+
+template <typename T>
+class RefCounted {
+ public:
+ RefCounted() {}
+ ~RefCounted() {}
+};
+
+} // namespace WebKit
+
+// Unsafe; should error.
+class PublicRefCountedDtorInHeader
+ : public base::RefCounted<PublicRefCountedDtorInHeader> {
+ public:
+ PublicRefCountedDtorInHeader() {}
+ ~PublicRefCountedDtorInHeader() {}
+
+ private:
+ friend class base::RefCounted<PublicRefCountedDtorInHeader>;
+};
+
+// Unsafe; should error.
+class PublicRefCountedThreadSafeDtorInHeader
+ : public base::RefCountedThreadSafe<
+ PublicRefCountedThreadSafeDtorInHeader> {
+ public:
+ PublicRefCountedThreadSafeDtorInHeader() {}
+ ~PublicRefCountedThreadSafeDtorInHeader() {}
+
+ private:
+ friend class base::RefCountedThreadSafe<
+ PublicRefCountedThreadSafeDtorInHeader>;
+};
+
+// Unsafe; should error.
+class ProtectedRefCountedDtorInHeader
+ : public base::RefCounted<ProtectedRefCountedDtorInHeader> {
+ public:
+ ProtectedRefCountedDtorInHeader() {}
+
+ protected:
+ ~ProtectedRefCountedDtorInHeader() {}
+
+ private:
+ friend class base::RefCounted<ProtectedRefCountedDtorInHeader>;
+};
+
+// Safe; should not have errors.
+class ProtectedRefCountedVirtualDtorInHeader
+ : public base::RefCounted<ProtectedRefCountedVirtualDtorInHeader> {
+ public:
+ ProtectedRefCountedVirtualDtorInHeader() {}
+
+ protected:
+ virtual ~ProtectedRefCountedVirtualDtorInHeader() {}
+
+ private:
+ friend class base::RefCounted<ProtectedRefCountedVirtualDtorInHeader>;
+};
+
+
+// Safe; should not have errors.
+class PrivateRefCountedDtorInHeader
+ : public base::RefCounted<PrivateRefCountedDtorInHeader> {
+ public:
+ PrivateRefCountedDtorInHeader() {}
+
+ private:
+ ~PrivateRefCountedDtorInHeader() {}
+ friend class base::RefCounted<PrivateRefCountedDtorInHeader>;
+};
+
+// Unsafe; a grandchild class ends up exposing its parent's and grandparent's
+// destructors.
+class DerivedProtectedToPublicInHeader
+ : public ProtectedRefCountedVirtualDtorInHeader {
+ public:
+ DerivedProtectedToPublicInHeader() {}
+ virtual ~DerivedProtectedToPublicInHeader() {}
+};
+
+// Unsafe; a grandchild ends up implicitly exposing its parent's and
+// grandparent's destructors.
+class ImplicitDerivedProtectedToPublicInHeader
+ : public ProtectedRefCountedVirtualDtorInHeader {
+ public:
+ ImplicitDerivedProtectedToPublicInHeader() {}
+};
+
+// Unsafe-but-ignored; should not have errors.
+class WebKitPublicDtorInHeader
+ : public WebKit::RefCounted<WebKitPublicDtorInHeader> {
+ public:
+ WebKitPublicDtorInHeader() {}
+ ~WebKitPublicDtorInHeader() {}
+};
+
+// Unsafe-but-ignored; should not have errors.
+class WebKitDerivedPublicDtorInHeader
+ : public WebKitPublicDtorInHeader {
+ public:
+ WebKitDerivedPublicDtorInHeader() {}
+ ~WebKitDerivedPublicDtorInHeader() {}
+};
+
+class APublicInterface {
+ public:
+ virtual ~APublicInterface() {}
+ virtual void DoFoo() = 0;
+};
+
+// Unsafe. "ImplementsAPublicInterface* foo" can be deleted via
+// "delete (APublicInterface*)foo;".
+class ImplementsAPublicInterface
+ : public APublicInterface,
+ public base::RefCounted<ImplementsAPublicInterface> {
+ public:
+ virtual void DoFoo() override {}
+
+ protected:
+ virtual ~ImplementsAPublicInterface() {}
+
+ private:
+ friend class base::RefCounted<ImplementsAPublicInterface>;
+};
+
+class AnImplicitInterface {
+ public:
+ virtual void DoBar() {}
+};
+
+// Unsafe.
+class ImplementsAnImplicitInterface
+ : public AnImplicitInterface,
+ public base::RefCounted<ImplementsAnImplicitInterface> {
+ public:
+ virtual void DoBar() override {}
+
+ private:
+ friend class base::RefCounted<ImplementsAnImplicitInterface>;
+ ~ImplementsAnImplicitInterface() {}
+};
+
+// Safe. Private inheritance does not expose the base destructor.
+class PrivatelyImplementsAPublicInterface
+ : private APublicInterface,
+ public base::RefCounted<PrivatelyImplementsAPublicInterface> {
+ public:
+ virtual void DoFoo() override {}
+
+ private:
+ friend class base::RefCounted<PrivatelyImplementsAPublicInterface>;
+ virtual ~PrivatelyImplementsAPublicInterface() {}
+};
+
+// Unsafe.
+class BaseInterface {
+ public:
+ virtual ~BaseInterface() {}
+ virtual void DoFoo() {}
+};
+class DerivedInterface : public BaseInterface {
+ protected:
+ virtual ~DerivedInterface() {}
+};
+class SomeOtherInterface {
+ public:
+ virtual ~SomeOtherInterface() {}
+ virtual void DoBar() {}
+};
+class RefcountedType : public base::RefCounted<RefcountedType> {
+ protected:
+ ~RefcountedType() {}
+ private:
+ friend class base::RefCounted<RefcountedType>;
+};
+class UnsafeInheritanceChain
+ : public DerivedInterface,
+ public SomeOtherInterface,
+ public RefcountedType {
+ public:
+ // DerivedInterface
+ virtual void DoFoo() override {}
+
+ // SomeOtherInterface
+ virtual void DoBar() override {}
+
+ protected:
+ virtual ~UnsafeInheritanceChain() {}
+};
+
+#endif // BASE_REFCOUNTED_H_
diff --git a/tools/clang/plugins/tests/base_refcounted.txt b/tools/clang/plugins/tests/base_refcounted.txt
new file mode 100644
index 0000000..20c0cdf
--- /dev/null
+++ b/tools/clang/plugins/tests/base_refcounted.txt
@@ -0,0 +1,87 @@
+In file included from base_refcounted.cpp:5:
+./base_refcounted.h:47:3: warning: [chromium-style] Classes that are ref-counted should have destructors that are declared protected or private.
+ ~PublicRefCountedDtorInHeader() {}
+ ^
+./base_refcounted.h:44:7: note: [chromium-style] 'PublicRefCountedDtorInHeader' inherits from 'base::RefCounted<PublicRefCountedDtorInHeader>' here
+ : public base::RefCounted<PublicRefCountedDtorInHeader> {
+ ^
+./base_refcounted.h:59:3: warning: [chromium-style] Classes that are ref-counted should have destructors that are declared protected or private.
+ ~PublicRefCountedThreadSafeDtorInHeader() {}
+ ^
+./base_refcounted.h:55:7: note: [chromium-style] 'PublicRefCountedThreadSafeDtorInHeader' inherits from 'base::RefCountedThreadSafe<PublicRefCountedThreadSafeDtorInHeader>' here
+ : public base::RefCountedThreadSafe<
+ ^
+./base_refcounted.h:73:3: warning: [chromium-style] Classes that are ref-counted and have non-private destructors should declare their destructor virtual.
+ ~ProtectedRefCountedDtorInHeader() {}
+ ^
+./base_refcounted.h:110:3: warning: [chromium-style] Classes that are ref-counted should have destructors that are declared protected or private.
+ virtual ~DerivedProtectedToPublicInHeader() {}
+ ^
+./base_refcounted.h:107:7: note: [chromium-style] 'DerivedProtectedToPublicInHeader' inherits from 'ProtectedRefCountedVirtualDtorInHeader' here
+ : public ProtectedRefCountedVirtualDtorInHeader {
+ ^
+./base_refcounted.h:81:7: note: [chromium-style] 'ProtectedRefCountedVirtualDtorInHeader' inherits from 'base::RefCounted<ProtectedRefCountedVirtualDtorInHeader>' here
+ : public base::RefCounted<ProtectedRefCountedVirtualDtorInHeader> {
+ ^
+./base_refcounted.h:115:7: warning: [chromium-style] Classes that are ref-counted should have explicit destructors that are declared protected or private.
+class ImplicitDerivedProtectedToPublicInHeader
+ ^
+./base_refcounted.h:116:7: note: [chromium-style] 'ImplicitDerivedProtectedToPublicInHeader' inherits from 'ProtectedRefCountedVirtualDtorInHeader' here
+ : public ProtectedRefCountedVirtualDtorInHeader {
+ ^
+./base_refcounted.h:81:7: note: [chromium-style] 'ProtectedRefCountedVirtualDtorInHeader' inherits from 'base::RefCounted<ProtectedRefCountedVirtualDtorInHeader>' here
+ : public base::RefCounted<ProtectedRefCountedVirtualDtorInHeader> {
+ ^
+./base_refcounted.h:145:1: warning: [chromium-style] Classes that are ref-counted should have destructors that are declared protected or private.
+class ImplementsAPublicInterface
+^
+./base_refcounted.h:147:7: note: [chromium-style] 'ImplementsAPublicInterface' inherits from 'base::RefCounted<ImplementsAPublicInterface>' here
+ public base::RefCounted<ImplementsAPublicInterface> {
+ ^
+./base_refcounted.h:139:3: note: [chromium-style] Public destructor declared here
+ virtual ~APublicInterface() {}
+ ^
+./base_refcounted.h:146:7: note: [chromium-style] 'ImplementsAPublicInterface' inherits from 'APublicInterface' here
+ : public APublicInterface,
+ ^
+./base_refcounted.h:164:1: warning: [chromium-style] Classes that are ref-counted should have explicit destructors that are declared protected or private.
+class ImplementsAnImplicitInterface
+^
+./base_refcounted.h:166:7: note: [chromium-style] 'ImplementsAnImplicitInterface' inherits from 'base::RefCounted<ImplementsAnImplicitInterface>' here
+ public base::RefCounted<ImplementsAnImplicitInterface> {
+ ^
+./base_refcounted.h:158:7: note: [chromium-style] No explicit destructor for 'AnImplicitInterface' defined
+class AnImplicitInterface {
+ ^
+./base_refcounted.h:165:7: note: [chromium-style] 'ImplementsAnImplicitInterface' inherits from 'AnImplicitInterface' here
+ : public AnImplicitInterface,
+ ^
+./base_refcounted.h:204:3: warning: [chromium-style] Classes that are ref-counted and have non-private destructors should declare their destructor virtual.
+ ~RefcountedType() {}
+ ^
+./base_refcounted.h:204:3: warning: [chromium-style] Classes that are ref-counted and have non-private destructors should declare their destructor virtual.
+base_refcounted.cpp:16:3: warning: [chromium-style] Classes that are ref-counted should have destructors that are declared protected or private.
+ virtual ~AnonymousDerivedProtectedToPublicInImpl() {}
+ ^
+base_refcounted.cpp:13:7: note: [chromium-style] 'AnonymousDerivedProtectedToPublicInImpl' inherits from 'ProtectedRefCountedVirtualDtorInHeader' here
+ : public ProtectedRefCountedVirtualDtorInHeader {
+ ^
+./base_refcounted.h:81:7: note: [chromium-style] 'ProtectedRefCountedVirtualDtorInHeader' inherits from 'base::RefCounted<ProtectedRefCountedVirtualDtorInHeader>' here
+ : public base::RefCounted<ProtectedRefCountedVirtualDtorInHeader> {
+ ^
+./base_refcounted.h:73:3: warning: [chromium-style] Classes that are ref-counted and have non-private destructors should declare their destructor virtual.
+ ~ProtectedRefCountedDtorInHeader() {}
+ ^
+base_refcounted.cpp:33:3: warning: [chromium-style] Classes that are ref-counted should have destructors that are declared protected or private.
+ ~PublicRefCountedDtorInImpl() {}
+ ^
+base_refcounted.cpp:30:7: note: [chromium-style] 'PublicRefCountedDtorInImpl' inherits from 'base::RefCounted<PublicRefCountedDtorInImpl>' here
+ : public base::RefCounted<PublicRefCountedDtorInImpl> {
+ ^
+base_refcounted.cpp:59:3: warning: [chromium-style] Classes that are ref-counted should have destructors that are declared protected or private.
+ ~UnsafeTypedefChainInImpl() {}
+ ^
+base_refcounted.cpp:56:34: note: [chromium-style] 'UnsafeTypedefChainInImpl' inherits from 'Baz::MyLocalTypedef' (aka 'RefCounted<Foo::BarInterface>') here
+class UnsafeTypedefChainInImpl : public Baz::MyLocalTypedef {
+ ^
+13 warnings generated.
diff --git a/tools/clang/plugins/tests/blacklisted_dirs.cpp b/tools/clang/plugins/tests/blacklisted_dirs.cpp
new file mode 100644
index 0000000..a94eedd
--- /dev/null
+++ b/tools/clang/plugins/tests/blacklisted_dirs.cpp
@@ -0,0 +1,22 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+struct Base {
+ virtual void foo();
+};
+
+#line 1 "/src/chromium/src/myheader.h"
+struct Derived : public Base {
+ virtual void foo(); // Should warn about missing 'override'.
+};
+
+#line 123 "/src/chrome-breakpad/src/myheader.h"
+struct Derived2 : public Base {
+ virtual void foo(); // Should warn about missing 'override'.
+};
+
+#line 123 "/src/chrome-breakpad/src/breakpad/myheader.h"
+struct Derived3 : public Base {
+ virtual void foo(); // Should not warn; file is in a blacklisted dir.
+};
diff --git a/tools/clang/plugins/tests/blacklisted_dirs.txt b/tools/clang/plugins/tests/blacklisted_dirs.txt
new file mode 100644
index 0000000..5df9e7c
--- /dev/null
+++ b/tools/clang/plugins/tests/blacklisted_dirs.txt
@@ -0,0 +1,9 @@
+/src/chromium/src/myheader.h:2:21: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void foo(); // Should warn about missing 'override'.
+ ^
+ override
+/src/chrome-breakpad/src/myheader.h:124:21: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void foo(); // Should warn about missing 'override'.
+ ^
+ override
+2 warnings generated.
diff --git a/tools/clang/plugins/tests/enum_last_value.cpp b/tools/clang/plugins/tests/enum_last_value.cpp
new file mode 100644
index 0000000..c189f23
--- /dev/null
+++ b/tools/clang/plugins/tests/enum_last_value.cpp
@@ -0,0 +1,39 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// We warn when xxxLAST constants aren't last.
+enum BadOne {
+ kBadOneInvalid = -1,
+ kBadOneRed,
+ kBadOneGreen,
+ kBadOneBlue,
+ kBadOneLast = kBadOneGreen
+};
+
+// We warn when xxx_LAST constants aren't last.
+enum BadTwo {
+ BAD_TWO_INVALID,
+ BAD_TWO_RED,
+ BAD_TWO_GREEN,
+ BAD_TWO_BLUE = 0xfffffffc,
+ BAD_TWO_LAST = BAD_TWO_GREEN
+};
+
+// We don't warn when xxxLAST constants are last.
+enum GoodOne {
+ kGoodOneInvalid = -1,
+ kGoodOneRed,
+ kGoodOneGreen,
+ kGoodOneBlue,
+ kGoodOneLast = kGoodOneBlue
+};
+
+// We don't warn when xxx_LAST constants are last.
+enum GoodTwo {
+ GOOD_TWO_INVALID,
+ GOOD_TWO_RED,
+ GOOD_TWO_GREEN,
+ GOOD_TWO_BLUE = 0xfffffffc,
+ GOOD_TWO_LAST = GOOD_TWO_BLUE
+};
diff --git a/tools/clang/plugins/tests/enum_last_value.flags b/tools/clang/plugins/tests/enum_last_value.flags
new file mode 100644
index 0000000..ee2ef7e
--- /dev/null
+++ b/tools/clang/plugins/tests/enum_last_value.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-find-bad-constructs -Xclang check-enum-last-value
diff --git a/tools/clang/plugins/tests/enum_last_value.txt b/tools/clang/plugins/tests/enum_last_value.txt
new file mode 100644
index 0000000..2d9e51d
--- /dev/null
+++ b/tools/clang/plugins/tests/enum_last_value.txt
@@ -0,0 +1,7 @@
+enum_last_value.cpp:11:3: warning: [chromium-style] _LAST/Last constants of enum types must have the maximal value for any constant of that type.
+ kBadOneLast = kBadOneGreen
+ ^
+enum_last_value.cpp:20:3: warning: [chromium-style] _LAST/Last constants of enum types must have the maximal value for any constant of that type.
+ BAD_TWO_LAST = BAD_TWO_GREEN
+ ^
+2 warnings generated.
diff --git a/tools/clang/plugins/tests/enum_last_value_from_c.c b/tools/clang/plugins/tests/enum_last_value_from_c.c
new file mode 100644
index 0000000..7fecbc0
--- /dev/null
+++ b/tools/clang/plugins/tests/enum_last_value_from_c.c
@@ -0,0 +1,40 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// We warn when xxxLAST constants aren't last.
+enum BadOne {
+ kBadOneInvalid = -1,
+ kBadOneRed,
+ kBadOneGreen,
+ kBadOneBlue,
+ kBadOneLast = kBadOneGreen
+};
+
+// We don't handle this case when called from C due to sign mismatch issues.
+// No matter; we're not looking for this issue outside of C++.
+enum FailOne {
+ FAIL_ONE_INVALID,
+ FAIL_ONE_RED,
+ FAIL_ONE_GREEN,
+ FAIL_ONE_BLUE = 0xfffffffc,
+ FAIL_ONE_LAST = FAIL_ONE_GREEN
+};
+
+// We don't warn when xxxLAST constants are last.
+enum GoodOne {
+ kGoodOneInvalid = -1,
+ kGoodOneRed,
+ kGoodOneGreen,
+ kGoodOneBlue,
+ kGoodOneLast = kGoodOneBlue
+};
+
+// We don't warn when xxx_LAST constants are last.
+enum GoodTwo {
+ GOOD_TWO_INVALID,
+ GOOD_TWO_RED,
+ GOOD_TWO_GREEN,
+ GOOD_TWO_BLUE = 0xfffffffc,
+ GOOD_TWO_LAST = GOOD_TWO_BLUE
+};
diff --git a/tools/clang/plugins/tests/enum_last_value_from_c.flags b/tools/clang/plugins/tests/enum_last_value_from_c.flags
new file mode 100644
index 0000000..ee2ef7e
--- /dev/null
+++ b/tools/clang/plugins/tests/enum_last_value_from_c.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-find-bad-constructs -Xclang check-enum-last-value
diff --git a/tools/clang/plugins/tests/enum_last_value_from_c.txt b/tools/clang/plugins/tests/enum_last_value_from_c.txt
new file mode 100644
index 0000000..3aa1d16
--- /dev/null
+++ b/tools/clang/plugins/tests/enum_last_value_from_c.txt
@@ -0,0 +1,4 @@
+enum_last_value_from_c.c:11:3: warning: [chromium-style] _LAST/Last constants of enum types must have the maximal value for any constant of that type.
+ kBadOneLast = kBadOneGreen
+ ^
+1 warning generated.
diff --git a/tools/clang/plugins/tests/inline_copy_ctor.cpp b/tools/clang/plugins/tests/inline_copy_ctor.cpp
new file mode 100644
index 0000000..dcd9002
--- /dev/null
+++ b/tools/clang/plugins/tests/inline_copy_ctor.cpp
@@ -0,0 +1,5 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "inline_copy_ctor.h"
diff --git a/tools/clang/plugins/tests/inline_copy_ctor.h b/tools/clang/plugins/tests/inline_copy_ctor.h
new file mode 100644
index 0000000..619a183
--- /dev/null
+++ b/tools/clang/plugins/tests/inline_copy_ctor.h
@@ -0,0 +1,12 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+struct C {
+ C();
+ ~C();
+
+ static C foo() { return C(); }
+
+  int a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x;
+};
diff --git a/tools/clang/plugins/tests/inline_copy_ctor.txt b/tools/clang/plugins/tests/inline_copy_ctor.txt
new file mode 100644
index 0000000..bc4bd89
--- /dev/null
+++ b/tools/clang/plugins/tests/inline_copy_ctor.txt
@@ -0,0 +1,5 @@
+In file included from inline_copy_ctor.cpp:5:
+./inline_copy_ctor.h:5:1: warning: [chromium-style] Complex class/struct needs an explicit out-of-line copy constructor.
+struct C {
+^
+1 warning generated.
diff --git a/tools/clang/plugins/tests/inline_ctor.cpp b/tools/clang/plugins/tests/inline_ctor.cpp
new file mode 100644
index 0000000..6a751fb
--- /dev/null
+++ b/tools/clang/plugins/tests/inline_ctor.cpp
@@ -0,0 +1,25 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "inline_ctor.h"
+
+#include <string>
+#include <vector>
+
+// We don't warn on classes that are in CPP files.
+class InlineInCPPOK {
+ public:
+ InlineInCPPOK() {}
+ ~InlineInCPPOK() {}
+
+ private:
+ std::vector<int> one_;
+ std::vector<std::string> two_;
+};
+
+int main() {
+ InlineInCPPOK one;
+ InlineCtorsArentOKInHeader two;
+ return 0;
+}
diff --git a/tools/clang/plugins/tests/inline_ctor.h b/tools/clang/plugins/tests/inline_ctor.h
new file mode 100644
index 0000000..d053b2f
--- /dev/null
+++ b/tools/clang/plugins/tests/inline_ctor.h
@@ -0,0 +1,21 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef INLINE_CTOR_H_
+#define INLINE_CTOR_H_
+
+#include <string>
+#include <vector>
+
+class InlineCtorsArentOKInHeader {
+ public:
+ InlineCtorsArentOKInHeader() {}
+ ~InlineCtorsArentOKInHeader() {}
+
+ private:
+ std::vector<int> one_;
+ std::vector<std::string> two_;
+};
+
+#endif // INLINE_CTOR_H_
diff --git a/tools/clang/plugins/tests/inline_ctor.txt b/tools/clang/plugins/tests/inline_ctor.txt
new file mode 100644
index 0000000..caa0cb4
--- /dev/null
+++ b/tools/clang/plugins/tests/inline_ctor.txt
@@ -0,0 +1,8 @@
+In file included from inline_ctor.cpp:5:
+./inline_ctor.h:13:3: warning: [chromium-style] Complex constructor has an inlined body.
+ InlineCtorsArentOKInHeader() {}
+ ^
+./inline_ctor.h:14:3: warning: [chromium-style] Complex destructor has an inline body.
+ ~InlineCtorsArentOKInHeader() {}
+ ^
+2 warnings generated.
diff --git a/tools/clang/plugins/tests/missing_ctor.cpp b/tools/clang/plugins/tests/missing_ctor.cpp
new file mode 100644
index 0000000..8ee2fb2
--- /dev/null
+++ b/tools/clang/plugins/tests/missing_ctor.cpp
@@ -0,0 +1,23 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "missing_ctor.h"
+
+#include <string>
+#include <vector>
+
+// We don't warn on classes that use default ctors in cpp files.
+class MissingInCPPOK {
+ public:
+
+ private:
+ std::vector<int> one_;
+ std::vector<std::string> two_;
+};
+
+int main() {
+ MissingInCPPOK one;
+ MissingCtorsArentOKInHeader two;
+ return 0;
+}
diff --git a/tools/clang/plugins/tests/missing_ctor.h b/tools/clang/plugins/tests/missing_ctor.h
new file mode 100644
index 0000000..1050457
--- /dev/null
+++ b/tools/clang/plugins/tests/missing_ctor.h
@@ -0,0 +1,19 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MISSING_CTOR_H_
+#define MISSING_CTOR_H_
+
+#include <string>
+#include <vector>
+
+class MissingCtorsArentOKInHeader {
+ public:
+
+ private:
+ std::vector<int> one_;
+ std::vector<std::string> two_;
+};
+
+#endif // MISSING_CTOR_H_
diff --git a/tools/clang/plugins/tests/missing_ctor.txt b/tools/clang/plugins/tests/missing_ctor.txt
new file mode 100644
index 0000000..301449c
--- /dev/null
+++ b/tools/clang/plugins/tests/missing_ctor.txt
@@ -0,0 +1,6 @@
+In file included from missing_ctor.cpp:5:
+./missing_ctor.h:11:1: warning: [chromium-style] Complex class/struct needs an explicit out-of-line constructor.
+class MissingCtorsArentOKInHeader {
+^
+./missing_ctor.h:11:1: warning: [chromium-style] Complex class/struct needs an explicit out-of-line destructor.
+2 warnings generated.
diff --git a/tools/clang/plugins/tests/nested_class_inline_ctor.cpp b/tools/clang/plugins/tests/nested_class_inline_ctor.cpp
new file mode 100644
index 0000000..aa90a95
--- /dev/null
+++ b/tools/clang/plugins/tests/nested_class_inline_ctor.cpp
@@ -0,0 +1,5 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "nested_class_inline_ctor.h"
diff --git a/tools/clang/plugins/tests/nested_class_inline_ctor.h b/tools/clang/plugins/tests/nested_class_inline_ctor.h
new file mode 100644
index 0000000..01cfea9
--- /dev/null
+++ b/tools/clang/plugins/tests/nested_class_inline_ctor.h
@@ -0,0 +1,22 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef NESTED_CLASS_INLINE_CTOR_H_
+#define NESTED_CLASS_INLINE_CTOR_H_
+
+#include <string>
+#include <vector>
+
+// See crbug.com/136863.
+
+class Foo {
+ class Bar {
+ Bar() {}
+ ~Bar() {}
+
+ std::vector<std::string> a;
+ };
+};
+
+#endif // NESTED_CLASS_INLINE_CTOR_H_
diff --git a/tools/clang/plugins/tests/nested_class_inline_ctor.txt b/tools/clang/plugins/tests/nested_class_inline_ctor.txt
new file mode 100644
index 0000000..39bd6e1
--- /dev/null
+++ b/tools/clang/plugins/tests/nested_class_inline_ctor.txt
@@ -0,0 +1,8 @@
+In file included from nested_class_inline_ctor.cpp:5:
+./nested_class_inline_ctor.h:15:5: warning: [chromium-style] Complex constructor has an inlined body.
+ Bar() {}
+ ^
+./nested_class_inline_ctor.h:16:5: warning: [chromium-style] Complex destructor has an inline body.
+ ~Bar() {}
+ ^
+2 warnings generated.
diff --git a/tools/clang/plugins/tests/overridden_methods.cpp b/tools/clang/plugins/tests/overridden_methods.cpp
new file mode 100644
index 0000000..398d6a4
--- /dev/null
+++ b/tools/clang/plugins/tests/overridden_methods.cpp
@@ -0,0 +1,49 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "overridden_methods.h"
+
+// Fill in the implementations.
+void DerivedClass::SomeMethod() {}
+void DerivedClass::SomeOtherMethod() {}
+void DerivedClass::WebKitModifiedSomething() {}
+
+class ImplementationInterimClass : public BaseClass {
+ public:
+ // Should not warn about pure virtual methods.
+ virtual void SomeMethod() = 0;
+};
+
+class ImplementationDerivedClass : public ImplementationInterimClass,
+ public webkit_glue::WebKitObserverImpl {
+ public:
+ // Should not warn about destructors.
+ virtual ~ImplementationDerivedClass() {}
+ // Should warn.
+ virtual void SomeMethod();
+ // Should not warn if marked as override.
+ virtual void SomeOtherMethod() override;
+ // Should not warn for inline implementations in implementation files.
+ virtual void SomeInlineMethod() {}
+ // Should not warn if overriding a method whose origin is blink.
+ virtual void WebKitModifiedSomething();
+ // Should warn with the insertion point after the const.
+ virtual void SomeConstMethod() const {}
+ // Should warn with the insertion point after the throw spec.
+ virtual void SomeMethodWithExceptionSpec() throw() {}
+ // Should warn with the insertion point after both the const and the throw
+ // specifiers.
+ virtual void SomeConstMethodWithExceptionSpec() const throw(int) {}
+ // Should warn even if overridden method isn't pure.
+ virtual void SomeNonPureBaseMethod() {}
+ // Should warn and place correctly even when there is a comment.
+ virtual void SomeMethodWithComment(); // This is a comment.
+ // Should warn and place correctly even if there is a comment and body.
+ virtual void SomeMethodWithCommentAndBody() {} // This is a comment.
+};
+
+int main() {
+ DerivedClass something;
+ ImplementationDerivedClass something_else;
+}
diff --git a/tools/clang/plugins/tests/overridden_methods.h b/tools/clang/plugins/tests/overridden_methods.h
new file mode 100644
index 0000000..c5af914
--- /dev/null
+++ b/tools/clang/plugins/tests/overridden_methods.h
@@ -0,0 +1,70 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef OVERRIDDEN_METHODS_H_
+#define OVERRIDDEN_METHODS_H_
+
+// Should warn about overriding of methods.
+class BaseClass {
+ public:
+ virtual ~BaseClass() {}
+ virtual void SomeMethod() = 0;
+ virtual void SomeOtherMethod() = 0;
+ virtual void SomeInlineMethod() = 0;
+ virtual void SomeConstMethod() const = 0;
+ virtual void SomeMethodWithExceptionSpec() throw() = 0;
+ virtual void SomeConstMethodWithExceptionSpec() const throw(int) = 0;
+ virtual void SomeNonPureBaseMethod() {}
+ virtual void SomeMethodWithComment() = 0;
+ virtual void SomeMethodWithCommentAndBody() = 0;
+};
+
+class InterimClass : public BaseClass {
+ // Should not warn about pure virtual methods.
+ virtual void SomeMethod() = 0;
+};
+
+namespace blink {
+class WebKitObserver {
+ public:
+ virtual void WebKitModifiedSomething() {};
+};
+} // namespace blink
+
+namespace webkit_glue {
+class WebKitObserverImpl : blink::WebKitObserver {
+ public:
+ virtual void WebKitModifiedSomething() {};
+};
+} // namespace webkit_glue
+
+class DerivedClass : public InterimClass,
+ public webkit_glue::WebKitObserverImpl {
+ public:
+ // Should not warn about destructors.
+ virtual ~DerivedClass() {}
+ // Should warn.
+ virtual void SomeMethod();
+ // Should not warn if marked as override.
+ virtual void SomeOtherMethod() override;
+ // Should warn for inline implementations.
+ virtual void SomeInlineMethod() {}
+ // Should not warn if overriding a method whose origin is blink.
+ virtual void WebKitModifiedSomething();
+ // Should warn with the insertion point after the const.
+ virtual void SomeConstMethod() const {}
+ // Should warn with the insertion point after the throw spec.
+ virtual void SomeMethodWithExceptionSpec() throw() {}
+ // Should warn with the insertion point after both the const and the throw
+ // specifiers.
+ virtual void SomeConstMethodWithExceptionSpec() const throw(int) {}
+ // Should warn even if overridden method isn't pure.
+ virtual void SomeNonPureBaseMethod() {}
+ // Should warn and place correctly even when there is a comment.
+ virtual void SomeMethodWithComment(); // This is a comment.
+ // Should warn and place correctly even if there is a comment and body.
+ virtual void SomeMethodWithCommentAndBody() {} // This is a comment.
+};
+
+#endif // OVERRIDDEN_METHODS_H_
diff --git a/tools/clang/plugins/tests/overridden_methods.txt b/tools/clang/plugins/tests/overridden_methods.txt
new file mode 100644
index 0000000..199876b
--- /dev/null
+++ b/tools/clang/plugins/tests/overridden_methods.txt
@@ -0,0 +1,66 @@
+In file included from overridden_methods.cpp:5:
+./overridden_methods.h:48:28: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeMethod();
+ ^
+ override
+./overridden_methods.h:52:34: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeInlineMethod() {}
+ ^
+ override
+./overridden_methods.h:56:39: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeConstMethod() const {}
+ ^
+ override
+./overridden_methods.h:58:53: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeMethodWithExceptionSpec() throw() {}
+ ^
+ override
+./overridden_methods.h:61:67: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeConstMethodWithExceptionSpec() const throw(int) {}
+ ^
+ override
+./overridden_methods.h:63:39: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeNonPureBaseMethod() {}
+ ^
+ override
+./overridden_methods.h:65:39: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeMethodWithComment(); // This is a comment.
+ ^
+ override
+./overridden_methods.h:67:46: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeMethodWithCommentAndBody() {} // This is a comment.
+ ^
+ override
+overridden_methods.cpp:24:28: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeMethod();
+ ^
+ override
+overridden_methods.cpp:28:34: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeInlineMethod() {}
+ ^
+ override
+overridden_methods.cpp:32:39: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeConstMethod() const {}
+ ^
+ override
+overridden_methods.cpp:34:53: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeMethodWithExceptionSpec() throw() {}
+ ^
+ override
+overridden_methods.cpp:37:67: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeConstMethodWithExceptionSpec() const throw(int) {}
+ ^
+ override
+overridden_methods.cpp:39:39: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeNonPureBaseMethod() {}
+ ^
+ override
+overridden_methods.cpp:41:39: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeMethodWithComment(); // This is a comment.
+ ^
+ override
+overridden_methods.cpp:43:46: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ virtual void SomeMethodWithCommentAndBody() {} // This is a comment.
+ ^
+ override
+16 warnings generated.
diff --git a/tools/clang/plugins/tests/test.sh b/tools/clang/plugins/tests/test.sh
new file mode 100755
index 0000000..9f63f7a
--- /dev/null
+++ b/tools/clang/plugins/tests/test.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+#
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Hacky, primitive testing: This runs the style plugin for a set of input files
+# and compares the output with golden result files.
+
+E_BADARGS=65
+E_FAILEDTEST=1
+
+failed_any_test=
+
+# Prints usage information.
+usage() {
+ echo "Usage: $(basename "${0}")" \
+ "<path to clang>" \
+ "<path to plugin>"
+ echo ""
+ echo " Runs all the libFindBadConstructs unit tests"
+ echo ""
+}
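+
+# Example invocation (both paths below are hypothetical placeholders):
+#   ./test.sh /path/to/clang /path/to/libFindBadConstructs.so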
+
+# Runs a single test case.
+do_testcase() {
+ local flags=""
+ if [ -e "${3}" ]; then
+ flags="$(cat "${3}")"
+ fi
+
+ if [ "$(uname -s)" = "Darwin" ]; then
+ flags="${flags} -isysroot $(xcrun --show-sdk-path) -stdlib=libstdc++"
+ fi
+
+ local output="$("${CLANG_PATH}" -fsyntax-only -Wno-c++11-extensions \
+ -Xclang -load -Xclang "${PLUGIN_PATH}" \
+ -Xclang -add-plugin -Xclang find-bad-constructs ${flags} ${1} 2>&1)"
+ local diffout="$(echo "${output}" | diff - "${2}")"
+ if [ "${diffout}" = "" ]; then
+ echo "PASS: ${1}"
+ else
+ failed_any_test=yes
+ echo "FAIL: ${1}"
+ echo "Output of compiler:"
+ echo "${output}"
+ cat > ${2}-actual << EOF
+${output}
+EOF
+
+ echo "Expected output:"
+ cat "${2}"
+ echo
+ fi
+}
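+
+# For example, virtual_specifiers.cpp is compiled with the extra flags listed
+# in virtual_specifiers.flags (when such a file exists) and the compiler output
+# is diffed against the golden file virtual_specifiers.txt.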
+
+# Validate input to the script.
+if [[ -z "${1}" ]]; then
+ usage
+ exit ${E_BADARGS}
+elif [[ -z "${2}" ]]; then
+ usage
+ exit ${E_BADARGS}
+elif [[ ! -x "${1}" ]]; then
+ echo "${1} is not an executable"
+ usage
+ exit ${E_BADARGS}
+elif [[ ! -f "${2}" ]]; then
+ echo "${2} could not be found"
+ usage
+ exit ${E_BADARGS}
+else
+ export CLANG_PATH="${1}"
+ export PLUGIN_PATH="${2}"
+ echo "Using clang ${CLANG_PATH}..."
+ echo "Using plugin ${PLUGIN_PATH}..."
+
+ # The golden files assume that the cwd is this directory. To make the script
+ # work no matter what the cwd is, explicitly cd there.
+ cd "$(dirname "${0}")"
+fi
+
+for input in *.cpp; do
+ do_testcase "${input}" "${input%cpp}txt" "${input%cpp}flags"
+done
+
+for input in *.c; do
+ do_testcase "${input}" "${input%c}txt" "${input%c}flags"
+done
+
+if [[ "${failed_any_test}" ]]; then
+ exit ${E_FAILEDTEST}
+fi
diff --git a/tools/clang/plugins/tests/virtual_base_method_also_final.cpp b/tools/clang/plugins/tests/virtual_base_method_also_final.cpp
new file mode 100644
index 0000000..93723f5
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_base_method_also_final.cpp
@@ -0,0 +1,17 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#define VIRTUAL virtual
+#define VIRTUAL_VOID virtual void
+
+class A {
+ public:
+ VIRTUAL void F() final {}
+ // Make sure an out-of-place virtual doesn't cause an incorrect fixit removal
+ // to be emitted.
+ void VIRTUAL G() final {}
+ // Make sure a fixit removal isn't generated for macros that expand to more
+ // than just 'virtual'.
+ VIRTUAL_VOID H() final {}
+};
diff --git a/tools/clang/plugins/tests/virtual_base_method_also_final.flags b/tools/clang/plugins/tests/virtual_base_method_also_final.flags
new file mode 100644
index 0000000..a8915fc
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_base_method_also_final.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-find-bad-constructs -Xclang strict-virtual-specifiers
diff --git a/tools/clang/plugins/tests/virtual_base_method_also_final.txt b/tools/clang/plugins/tests/virtual_base_method_also_final.txt
new file mode 100644
index 0000000..80208dc
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_base_method_also_final.txt
@@ -0,0 +1,29 @@
+virtual_base_method_also_final.cpp:10:3: warning: [chromium-style] 'virtual' is redundant; 'final' implies 'virtual'.
+ VIRTUAL void F() final {}
+ ^~~~~~~~
+virtual_base_method_also_final.cpp:5:17: note: expanded from macro 'VIRTUAL'
+#define VIRTUAL virtual
+ ^
+virtual_base_method_also_final.cpp:10:3: warning: [chromium-style] The virtual method does not override anything and is final; consider making it non-virtual.
+ VIRTUAL void F() final {}
+ ^~~~~~~~ ~~~~~~
+virtual_base_method_also_final.cpp:5:17: note: expanded from macro 'VIRTUAL'
+#define VIRTUAL virtual
+ ^
+virtual_base_method_also_final.cpp:13:3: warning: [chromium-style] 'virtual' is redundant; 'final' implies 'virtual'.
+ void VIRTUAL G() final {}
+ ^
+virtual_base_method_also_final.cpp:13:3: warning: [chromium-style] The virtual method does not override anything and is final; consider making it non-virtual.
+ void VIRTUAL G() final {}
+ ^ ~~~~~~
+virtual_base_method_also_final.cpp:16:3: warning: [chromium-style] 'virtual' is redundant; 'final' implies 'virtual'.
+ VIRTUAL_VOID H() final {}
+ ^
+virtual_base_method_also_final.cpp:6:22: note: expanded from macro 'VIRTUAL_VOID'
+#define VIRTUAL_VOID virtual void
+ ^
+virtual_base_method_also_final.cpp:16:3: warning: [chromium-style] The virtual method does not override anything and is final; consider making it non-virtual.
+virtual_base_method_also_final.cpp:6:22: note: expanded from macro 'VIRTUAL_VOID'
+#define VIRTUAL_VOID virtual void
+ ^
+6 warnings generated.
diff --git a/tools/clang/plugins/tests/virtual_bodies.cpp b/tools/clang/plugins/tests/virtual_bodies.cpp
new file mode 100644
index 0000000..8815dc2
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_bodies.cpp
@@ -0,0 +1,39 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "virtual_bodies.h"
+
+// Shouldn't warn about method usage in the implementation file.
+class VirtualMethodsInImplementation {
+ public:
+ virtual void MethodIsAbstract() = 0;
+ virtual void MethodHasNoArguments();
+ virtual void MethodHasEmptyDefaultImpl() {}
+ virtual bool ComplainAboutThis() { return true; }
+};
+
+// Stubs to fill in the abstract method
+class ConcreteVirtualMethodsInHeaders : public VirtualMethodsInHeaders {
+ public:
+ virtual void MethodIsAbstract() override {}
+};
+
+class ConcreteVirtualMethodsInImplementation
+ : public VirtualMethodsInImplementation {
+ public:
+ virtual void MethodIsAbstract() override {}
+};
+
+// Fill in the implementations
+void VirtualMethodsInHeaders::MethodHasNoArguments() {
+}
+void WarnOnMissingVirtual::MethodHasNoArguments() {
+}
+void VirtualMethodsInImplementation::MethodHasNoArguments() {
+}
+
+int main() {
+ ConcreteVirtualMethodsInHeaders one;
+ ConcreteVirtualMethodsInImplementation two;
+}
diff --git a/tools/clang/plugins/tests/virtual_bodies.h b/tools/clang/plugins/tests/virtual_bodies.h
new file mode 100644
index 0000000..4ebe695
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_bodies.h
@@ -0,0 +1,40 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef VIRTUAL_METHODS_H_
+#define VIRTUAL_METHODS_H_
+
+// Should warn about virtual method usage.
+class VirtualMethodsInHeaders {
+ public:
+ // Don't complain about these.
+ virtual void MethodIsAbstract() = 0;
+ virtual void MethodHasNoArguments();
+ virtual void MethodHasEmptyDefaultImpl() {}
+
+ // But complain about this:
+ virtual bool ComplainAboutThis() { return true; }
+};
+
+// Complain on missing 'virtual' keyword in overrides.
+class WarnOnMissingVirtual : public VirtualMethodsInHeaders {
+ public:
+ void MethodHasNoArguments() override;
+};
+
+// Don't complain about things in a 'testing' namespace.
+namespace testing {
+struct TestStruct {};
+} // namespace testing
+
+class VirtualMethodsInHeadersTesting : public VirtualMethodsInHeaders {
+ public:
+ // Don't complain about no virtual testing methods.
+ void MethodHasNoArguments();
+
+ private:
+ testing::TestStruct tester_;
+};
+
+#endif // VIRTUAL_METHODS_H_
diff --git a/tools/clang/plugins/tests/virtual_bodies.txt b/tools/clang/plugins/tests/virtual_bodies.txt
new file mode 100644
index 0000000..121d1a9
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_bodies.txt
@@ -0,0 +1,5 @@
+In file included from virtual_bodies.cpp:5:
+./virtual_bodies.h:17:36: warning: [chromium-style] virtual methods with non-empty bodies shouldn't be declared inline.
+ virtual bool ComplainAboutThis() { return true; }
+ ^
+1 warning generated.
diff --git a/tools/clang/plugins/tests/virtual_specifiers.cpp b/tools/clang/plugins/tests/virtual_specifiers.cpp
new file mode 100644
index 0000000..f4479a8
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_specifiers.cpp
@@ -0,0 +1,63 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Tests for chromium style checks for virtual/override/final specifiers on
+// virtual methods.
+
+// Purposely use macros to test that the FixIt hints don't try to remove the
+// macro body.
+#define OVERRIDE override
+#define FINAL final
+
+// Base class can only use virtual.
+class Base {
+ public:
+ virtual ~Base() {}
+ virtual void F() = 0;
+};
+
+// Derived classes correctly use only override or final specifier.
+class CorrectOverride : public Base {
+ public:
+ ~CorrectOverride() OVERRIDE {}
+ void F() OVERRIDE {}
+};
+
+class CorrectFinal : public CorrectOverride {
+ public:
+ ~CorrectFinal() FINAL {}
+ void F() FINAL {}
+};
+
+// No override on an overridden method should trigger a diagnostic.
+class MissingOverride : public Base {
+ public:
+ ~MissingOverride() {}
+ void F() {}
+};
+
+// Redundant specifiers should trigger a diagnostic.
+class VirtualAndOverride : public Base {
+ public:
+ virtual ~VirtualAndOverride() OVERRIDE {}
+ virtual void F() OVERRIDE {}
+};
+
+class VirtualAndFinal : public Base {
+ public:
+ virtual ~VirtualAndFinal() FINAL {}
+ virtual void F() FINAL {}
+};
+
+class VirtualAndOverrideFinal : public Base {
+ public:
+ virtual ~VirtualAndOverrideFinal() OVERRIDE FINAL {}
+ virtual void F() OVERRIDE FINAL {}
+};
+
+class OverrideAndFinal : public Base {
+ public:
+ ~OverrideAndFinal() OVERRIDE FINAL {}
+ void F() OVERRIDE FINAL {}
+};
diff --git a/tools/clang/plugins/tests/virtual_specifiers.flags b/tools/clang/plugins/tests/virtual_specifiers.flags
new file mode 100644
index 0000000..a8915fc
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_specifiers.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-find-bad-constructs -Xclang strict-virtual-specifiers
diff --git a/tools/clang/plugins/tests/virtual_specifiers.txt b/tools/clang/plugins/tests/virtual_specifiers.txt
new file mode 100644
index 0000000..2ad1420
--- /dev/null
+++ b/tools/clang/plugins/tests/virtual_specifiers.txt
@@ -0,0 +1,51 @@
+virtual_specifiers.cpp:36:21: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ ~MissingOverride() {}
+ ^
+ override
+virtual_specifiers.cpp:37:11: warning: [chromium-style] Overriding method must be marked with 'override' or 'final'.
+ void F() {}
+ ^
+ override
+virtual_specifiers.cpp:43:3: warning: [chromium-style] 'virtual' is redundant; 'override' implies 'virtual'.
+ virtual ~VirtualAndOverride() OVERRIDE {}
+ ^~~~~~~~
+virtual_specifiers.cpp:44:3: warning: [chromium-style] 'virtual' is redundant; 'override' implies 'virtual'.
+ virtual void F() OVERRIDE {}
+ ^~~~~~~~
+virtual_specifiers.cpp:49:3: warning: [chromium-style] 'virtual' is redundant; 'final' implies 'virtual'.
+ virtual ~VirtualAndFinal() FINAL {}
+ ^~~~~~~~
+virtual_specifiers.cpp:50:3: warning: [chromium-style] 'virtual' is redundant; 'final' implies 'virtual'.
+ virtual void F() FINAL {}
+ ^~~~~~~~
+virtual_specifiers.cpp:55:3: warning: [chromium-style] 'virtual' is redundant; 'override' implies 'virtual'.
+ virtual ~VirtualAndOverrideFinal() OVERRIDE FINAL {}
+ ^~~~~~~~
+virtual_specifiers.cpp:55:38: warning: [chromium-style] 'override' is redundant; 'final' implies 'override'.
+ virtual ~VirtualAndOverrideFinal() OVERRIDE FINAL {}
+ ^~~~~~~~~
+virtual_specifiers.cpp:10:18: note: expanded from macro 'OVERRIDE'
+#define OVERRIDE override
+ ^
+virtual_specifiers.cpp:56:3: warning: [chromium-style] 'virtual' is redundant; 'override' implies 'virtual'.
+ virtual void F() OVERRIDE FINAL {}
+ ^~~~~~~~
+virtual_specifiers.cpp:56:20: warning: [chromium-style] 'override' is redundant; 'final' implies 'override'.
+ virtual void F() OVERRIDE FINAL {}
+ ^~~~~~~~~
+virtual_specifiers.cpp:10:18: note: expanded from macro 'OVERRIDE'
+#define OVERRIDE override
+ ^
+virtual_specifiers.cpp:61:23: warning: [chromium-style] 'override' is redundant; 'final' implies 'override'.
+ ~OverrideAndFinal() OVERRIDE FINAL {}
+ ^~~~~~~~~
+virtual_specifiers.cpp:10:18: note: expanded from macro 'OVERRIDE'
+#define OVERRIDE override
+ ^
+virtual_specifiers.cpp:62:12: warning: [chromium-style] 'override' is redundant; 'final' implies 'override'.
+ void F() OVERRIDE FINAL {}
+ ^~~~~~~~~
+virtual_specifiers.cpp:10:18: note: expanded from macro 'OVERRIDE'
+#define OVERRIDE override
+ ^
+12 warnings generated.
diff --git a/tools/clang/plugins/tests/weak_ptr_factory.cpp b/tools/clang/plugins/tests/weak_ptr_factory.cpp
new file mode 100644
index 0000000..79c23b4
--- /dev/null
+++ b/tools/clang/plugins/tests/weak_ptr_factory.cpp
@@ -0,0 +1,49 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "weak_ptr_factory.h"
+namespace should_succeed {
+
+class OnlyMember {
+ base::WeakPtrFactory<OnlyMember> factory_;
+};
+
+class FactoryLast {
+ bool bool_member_;
+ int int_member_;
+ base::WeakPtrFactory<FactoryLast> factory_;
+};
+
+class FactoryRefersToOtherType {
+ bool bool_member_;
+ base::WeakPtrFactory<bool> bool_ptr_factory_;
+};
+
+class FirstFactoryRefersToOtherType {
+ bool bool_member_;
+ base::WeakPtrFactory<bool> bool_ptr_factory_;
+ int int_member_;
+ base::WeakPtrFactory<FirstFactoryRefersToOtherType> factory_;
+};
+
+} // namespace should_succeed
+
+namespace should_fail {
+
+class FactoryFirst {
+ base::WeakPtrFactory<FactoryFirst> factory_;
+ int int_member;
+};
+
+class FactoryMiddle {
+ bool bool_member_;
+ base::WeakPtrFactory<FactoryMiddle> factory_;
+ int int_member_;
+};
+
+} // namespace should_fail
+
+int main() {
+}
+
diff --git a/tools/clang/plugins/tests/weak_ptr_factory.flags b/tools/clang/plugins/tests/weak_ptr_factory.flags
new file mode 100644
index 0000000..e865249
--- /dev/null
+++ b/tools/clang/plugins/tests/weak_ptr_factory.flags
@@ -0,0 +1 @@
+-Xclang -plugin-arg-find-bad-constructs -Xclang check-weak-ptr-factory-order
diff --git a/tools/clang/plugins/tests/weak_ptr_factory.h b/tools/clang/plugins/tests/weak_ptr_factory.h
new file mode 100644
index 0000000..7aebfc5
--- /dev/null
+++ b/tools/clang/plugins/tests/weak_ptr_factory.h
@@ -0,0 +1,18 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef WEAK_PTR_FACTORY_H_
+#define WEAK_PTR_FACTORY_H_
+
+namespace base {
+
+template <typename T>
+class WeakPtrFactory {
+ public:
+ explicit WeakPtrFactory(T*) {}
+};
+
+} // namespace base
+
+#endif // WEAK_PTR_FACTORY_H_
diff --git a/tools/clang/plugins/tests/weak_ptr_factory.txt b/tools/clang/plugins/tests/weak_ptr_factory.txt
new file mode 100644
index 0000000..f9c2c0a
--- /dev/null
+++ b/tools/clang/plugins/tests/weak_ptr_factory.txt
@@ -0,0 +1,7 @@
+weak_ptr_factory.cpp:35:38: warning: [chromium-style] WeakPtrFactory members which refer to their outer class must be the last member in the outer class definition.
+ base::WeakPtrFactory<FactoryFirst> factory_;
+ ^
+weak_ptr_factory.cpp:41:39: warning: [chromium-style] WeakPtrFactory members which refer to their outer class must be the last member in the outer class definition.
+ base::WeakPtrFactory<FactoryMiddle> factory_;
+ ^
+2 warnings generated.
diff --git a/tools/clang/rewrite_scoped_refptr/CMakeLists.txt b/tools/clang/rewrite_scoped_refptr/CMakeLists.txt
new file mode 100644
index 0000000..87ce4d8
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/CMakeLists.txt
@@ -0,0 +1,26 @@
+set(LLVM_LINK_COMPONENTS
+ BitReader
+ MCParser
+ Option
+ )
+
+add_llvm_executable(rewrite_scoped_refptr
+ RewriteScopedRefptr.cpp
+ )
+
+target_link_libraries(rewrite_scoped_refptr
+ clangAST
+ clangASTMatchers
+ clangAnalysis
+ clangBasic
+ clangDriver
+ clangEdit
+ clangFrontend
+ clangLex
+ clangParse
+ clangSema
+ clangSerialization
+ clangTooling
+ )
+
+install(TARGETS rewrite_scoped_refptr RUNTIME DESTINATION bin)
diff --git a/tools/clang/rewrite_scoped_refptr/RewriteScopedRefptr.cpp b/tools/clang/rewrite_scoped_refptr/RewriteScopedRefptr.cpp
new file mode 100644
index 0000000..b2cd31f
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/RewriteScopedRefptr.cpp
@@ -0,0 +1,428 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This implements a Clang tool to rewrite all instances of
+// scoped_refptr<T>'s implicit cast to T (operator T*) to an explicit call to
+// the .get() method.
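+//
+// For example (taken from the tests/ directory below), an implicit conversion
+// such as
+//   Foo* f = b.foo();   // b.foo() returns a const scoped_refptr<Foo>&
+// is rewritten to
+//   Foo* f = b.foo().get();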
+
+#include <assert.h>
+#include <algorithm>
+#include <memory>
+#include <string>
+
+#include "clang/AST/ASTContext.h"
+#include "clang/ASTMatchers/ASTMatchers.h"
+#include "clang/ASTMatchers/ASTMatchersMacros.h"
+#include "clang/ASTMatchers/ASTMatchFinder.h"
+#include "clang/Basic/SourceManager.h"
+#include "clang/Frontend/FrontendActions.h"
+#include "clang/Lex/Lexer.h"
+#include "clang/Tooling/CommonOptionsParser.h"
+#include "clang/Tooling/Refactoring.h"
+#include "clang/Tooling/Tooling.h"
+#include "llvm/Support/CommandLine.h"
+
+using namespace clang::ast_matchers;
+using clang::tooling::CommonOptionsParser;
+using clang::tooling::Replacement;
+using clang::tooling::Replacements;
+using llvm::StringRef;
+
+namespace clang {
+namespace ast_matchers {
+
+const internal::VariadicDynCastAllOfMatcher<Decl, CXXConversionDecl>
+ conversionDecl;
+
+AST_MATCHER(QualType, isBoolean) {
+ return Node->isBooleanType();
+}
+
+} // namespace ast_matchers
+} // namespace clang
+
+namespace {
+
+// Returns true if expr needs to be put in parens (eg: when it is an operator
+// syntactically).
+bool NeedsParens(const clang::Expr* expr) {
+ if (llvm::dyn_cast<clang::UnaryOperator>(expr) ||
+ llvm::dyn_cast<clang::BinaryOperator>(expr) ||
+ llvm::dyn_cast<clang::ConditionalOperator>(expr)) {
+ return true;
+ }
+ // Calls to an overloaded operator also need parens, except for foo(...) and
+ // foo[...] expressions.
+ if (const clang::CXXOperatorCallExpr* op =
+ llvm::dyn_cast<clang::CXXOperatorCallExpr>(expr)) {
+ return op->getOperator() != clang::OO_Call &&
+ op->getOperator() != clang::OO_Subscript;
+ }
+ return false;
+}
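+
+// For instance (illustrative), when a conditional expression such as
+// "flag ? a : b" is the expression being converted, the rewrite must produce
+// "(flag ? a : b).get()" rather than "flag ? a : b.get()"; NeedsParens() is
+// what adds the extra parentheses.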
+
+Replacement RewriteImplicitToExplicitConversion(
+ const MatchFinder::MatchResult& result,
+ const clang::Expr* expr) {
+ clang::CharSourceRange range = clang::CharSourceRange::getTokenRange(
+ result.SourceManager->getSpellingLoc(expr->getLocStart()),
+ result.SourceManager->getSpellingLoc(expr->getLocEnd()));
+ assert(range.isValid() && "Invalid range!");
+
+ // Handle cases where an implicit cast is being done by dereferencing a
+ // pointer to a scoped_refptr<> (sadly, it happens...)
+ //
+ // This rewrites both "*foo" and "*(foo)" as "foo->get()".
+ if (const clang::UnaryOperator* op =
+ llvm::dyn_cast<clang::UnaryOperator>(expr)) {
+ if (op->getOpcode() == clang::UO_Deref) {
+ const clang::Expr* const sub_expr =
+ op->getSubExpr()->IgnoreParenImpCasts();
+ clang::CharSourceRange sub_expr_range =
+ clang::CharSourceRange::getTokenRange(
+ result.SourceManager->getSpellingLoc(sub_expr->getLocStart()),
+ result.SourceManager->getSpellingLoc(sub_expr->getLocEnd()));
+ assert(sub_expr_range.isValid() && "Invalid subexpression range!");
+
+ std::string inner_text = clang::Lexer::getSourceText(
+ sub_expr_range, *result.SourceManager, result.Context->getLangOpts());
+ assert(!inner_text.empty() && "No text for subexpression!");
+ if (NeedsParens(sub_expr)) {
+ inner_text.insert(0, "(");
+ inner_text.append(")");
+ }
+ inner_text.append("->get()");
+ return Replacement(*result.SourceManager, range, inner_text);
+ }
+ }
+
+ std::string text = clang::Lexer::getSourceText(
+ range, *result.SourceManager, result.Context->getLangOpts());
+ assert(!text.empty() && "No text for expression!");
+
+ // Unwrap any temporaries - for example, custom iterators that return
+ // scoped_refptr<T> as part of operator*. Any such iterators should also
+ // be declaring a scoped_refptr<T>* operator->, per C++03 24.4.1.1 (Table 72)
+ if (const clang::CXXBindTemporaryExpr* op =
+ llvm::dyn_cast<clang::CXXBindTemporaryExpr>(expr)) {
+ expr = op->getSubExpr();
+ }
+
+ // Handle iterators (which are operator* calls, followed by implicit
+ // conversions) by rewriting *it as it->get()
+ if (const clang::CXXOperatorCallExpr* op =
+ llvm::dyn_cast<clang::CXXOperatorCallExpr>(expr)) {
+ if (op->getOperator() == clang::OO_Star) {
+ // Note that this doesn't rewrite **it correctly, since it should be
+ // rewritten using parens, e.g. (*it)->get(). However, this shouldn't
+ // happen frequently, if at all, since it would likely indicate code is
+ // storing pointers to a scoped_refptr in a container.
+ text.erase(0, 1);
+ text.append("->get()");
+ return Replacement(*result.SourceManager, range, text);
+ }
+ }
+
+ // The only remaining calls should be non-dereferencing calls (eg: member
+ // calls), so a simple ".get()" appending should suffice.
+ if (NeedsParens(expr)) {
+ text.insert(0, "(");
+ text.append(")");
+ }
+ text.append(".get()");
+ return Replacement(*result.SourceManager, range, text);
+}
+
+Replacement RewriteRawPtrToScopedRefptr(const MatchFinder::MatchResult& result,
+ clang::SourceLocation begin,
+ clang::SourceLocation end) {
+ clang::CharSourceRange range = clang::CharSourceRange::getTokenRange(
+ result.SourceManager->getSpellingLoc(begin),
+ result.SourceManager->getSpellingLoc(end));
+ assert(range.isValid() && "Invalid range!");
+
+ std::string text = clang::Lexer::getSourceText(
+ range, *result.SourceManager, result.Context->getLangOpts());
+ text.erase(text.rfind('*'));
+
+ std::string replacement_text("scoped_refptr<");
+ replacement_text += text;
+ replacement_text += ">";
+
+ return Replacement(*result.SourceManager, range, replacement_text);
+}
+
+class GetRewriterCallback : public MatchFinder::MatchCallback {
+ public:
+ explicit GetRewriterCallback(Replacements* replacements)
+ : replacements_(replacements) {}
+ virtual void run(const MatchFinder::MatchResult& result) override;
+
+ private:
+ Replacements* const replacements_;
+};
+
+void GetRewriterCallback::run(const MatchFinder::MatchResult& result) {
+ const clang::Expr* arg = result.Nodes.getNodeAs<clang::Expr>("arg");
+ assert(arg && "Unexpected match! No Expr captured!");
+ replacements_->insert(RewriteImplicitToExplicitConversion(result, arg));
+}
+
+class VarRewriterCallback : public MatchFinder::MatchCallback {
+ public:
+ explicit VarRewriterCallback(Replacements* replacements)
+ : replacements_(replacements) {}
+ virtual void run(const MatchFinder::MatchResult& result) override;
+
+ private:
+ Replacements* const replacements_;
+};
+
+void VarRewriterCallback::run(const MatchFinder::MatchResult& result) {
+ const clang::DeclaratorDecl* const var_decl =
+ result.Nodes.getNodeAs<clang::DeclaratorDecl>("var");
+ assert(var_decl && "Unexpected match! No VarDecl captured!");
+
+ const clang::TypeSourceInfo* tsi = var_decl->getTypeSourceInfo();
+
+ // TODO(dcheng): This mishandles a case where a variable has multiple
+ // declarations, e.g.:
+ //
+ // in .h:
+ // Foo* my_global_magical_foo;
+ //
+ // in .cc:
+ // Foo* my_global_magical_foo = CreateFoo();
+ //
+ // In this case, it will only rewrite the .cc definition. Oh well. This should
+ // be rare enough that these cases can be manually handled, since the style
+ // guide prohibits globals of non-POD type.
+ replacements_->insert(RewriteRawPtrToScopedRefptr(
+ result, tsi->getTypeLoc().getBeginLoc(), tsi->getTypeLoc().getEndLoc()));
+}
+
+class FunctionRewriterCallback : public MatchFinder::MatchCallback {
+ public:
+ explicit FunctionRewriterCallback(Replacements* replacements)
+ : replacements_(replacements) {}
+ virtual void run(const MatchFinder::MatchResult& result) override;
+
+ private:
+ Replacements* const replacements_;
+};
+
+void FunctionRewriterCallback::run(const MatchFinder::MatchResult& result) {
+ const clang::FunctionDecl* const function_decl =
+ result.Nodes.getNodeAs<clang::FunctionDecl>("fn");
+ assert(function_decl && "Unexpected match! No FunctionDecl captured!");
+
+ // If matched against an implicit conversion to a DeclRefExpr, make sure the
+ // referenced declaration is of class type, e.g. the tool skips trying to
+ // chase pointers/references to determine if the pointee is a scoped_refptr<T>
+ // with local storage. Instead, let a human manually handle those cases.
+ const clang::VarDecl* const var_decl =
+ result.Nodes.getNodeAs<clang::VarDecl>("var");
+ if (var_decl && !var_decl->getTypeSourceInfo()->getType()->isClassType()) {
+ return;
+ }
+
+ for (clang::FunctionDecl* f : function_decl->redecls()) {
+ clang::SourceRange range = f->getReturnTypeSourceRange();
+ replacements_->insert(
+ RewriteRawPtrToScopedRefptr(result, range.getBegin(), range.getEnd()));
+ }
+}
+
+class MacroRewriterCallback : public MatchFinder::MatchCallback {
+ public:
+ explicit MacroRewriterCallback(Replacements* replacements)
+ : replacements_(replacements) {}
+ virtual void run(const MatchFinder::MatchResult& result) override;
+
+ private:
+ Replacements* const replacements_;
+};
+
+void MacroRewriterCallback::run(const MatchFinder::MatchResult& result) {
+ const clang::Expr* const expr = result.Nodes.getNodeAs<clang::Expr>("expr");
+ assert(expr && "Unexpected match! No Expr captured!");
+ replacements_->insert(RewriteImplicitToExplicitConversion(result, expr));
+}
+
+} // namespace
+
+static llvm::cl::extrahelp common_help(CommonOptionsParser::HelpMessage);
+
+int main(int argc, const char* argv[]) {
+ llvm::cl::OptionCategory category("Remove scoped_refptr conversions");
+ CommonOptionsParser options(argc, argv, category);
+ clang::tooling::ClangTool tool(options.getCompilations(),
+ options.getSourcePathList());
+
+ MatchFinder match_finder;
+ Replacements replacements;
+
+ auto is_scoped_refptr = recordDecl(isSameOrDerivedFrom("::scoped_refptr"),
+ isTemplateInstantiation());
+
+ // Finds all calls to conversion operator member functions. This catches calls
+ // to "operator T*", "operator Testable", and "operator bool" equally.
+ auto base_matcher = memberCallExpr(thisPointerType(is_scoped_refptr),
+ callee(conversionDecl()),
+ on(id("arg", expr())));
+
+ // The heuristic for whether or not converting a temporary is 'unsafe'. An
+ // unsafe conversion is one where a temporary scoped_refptr<T> is converted to
+ // another type. The matcher provides an exception for a temporary
+ // scoped_refptr that is the result of an operator call. In this case, assume
+ // that it's the result of an iterator dereference, and the container itself
+ // retains the necessary reference, since this is a common idiom to see in
+ // loop bodies.
+ auto is_unsafe_temporary_conversion =
+ on(bindTemporaryExpr(unless(has(operatorCallExpr()))));
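+ //
+ // For example (from the tests), initializing a raw pointer from a temporary,
+ //   Foo* unsafe = GetBuggyFoo();  // GetBuggyFoo() returns scoped_refptr<Foo>
+ // is treated as unsafe, while a custom iterator whose operator* returns a
+ // temporary scoped_refptr (see test12 in tests/) is exempted, so its
+ // dereference is rewritten by GetRewriterCallback to use it->get() instead.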
+
+ // Returning a scoped_refptr<T> as a T* is considered unsafe if either of
+ // the following is true:
+ // - The scoped_refptr<T> is a temporary.
+ // - The scoped_refptr<T> has local lifetime.
+ auto returned_as_raw_ptr = hasParent(
+ returnStmt(hasAncestor(id("fn", functionDecl(returns(pointerType()))))));
+ // This matcher intentionally matches more than it should. For example, this
+ // will match:
+ // scoped_refptr<Foo>& foo = some_other_foo;
+ // return foo;
+ // The matcher callback filters out VarDecls that aren't a scoped_refptr<T>,
+ // so those cases can be manually handled.
+ auto is_local_variable =
+ on(declRefExpr(to(id("var", varDecl(hasLocalStorage())))));
+ auto is_unsafe_return =
+ anyOf(allOf(hasParent(implicitCastExpr(returned_as_raw_ptr)),
+ is_local_variable),
+ allOf(hasParent(implicitCastExpr(
+ hasParent(exprWithCleanups(returned_as_raw_ptr)))),
+ is_unsafe_temporary_conversion));
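+ //
+ // For example (both taken from the tests), these bodies are flagged so that
+ // the raw-pointer return type can be rewritten to scoped_refptr<Foo>:
+ //   Foo* GetBuggyFoo() {
+ //     scoped_refptr<Foo> unsafe(new Foo);
+ //     return unsafe;       // scoped_refptr with local lifetime
+ //   }
+ //   Foo* Bar::TestFunction() {
+ //     return CreateFoo();  // CreateFoo() returns a temporary scoped_refptr
+ //   }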
+
+ // This catches both user-defined conversions (eg: "operator bool") and
+ // standard conversion sequence (C++03 13.3.3.1.1), such as converting a
+ // pointer to a bool.
+ auto implicit_to_bool =
+ implicitCastExpr(hasImplicitDestinationType(isBoolean()));
+
+ // Avoid converting calls of "operator Testable" -> "bool" and calls of
+ // "operator T*" -> "bool".
+ auto bool_conversion_matcher = hasParent(
+ expr(anyOf(implicit_to_bool, expr(hasParent(implicit_to_bool)))));
+
+ auto is_logging_helper =
+ functionDecl(anyOf(hasName("CheckEQImpl"), hasName("CheckNEImpl")));
+ auto is_gtest_helper = functionDecl(
+ anyOf(methodDecl(ofClass(recordDecl(isSameOrDerivedFrom(
+ hasName("::testing::internal::EqHelper")))),
+ hasName("Compare")),
+ hasName("::testing::internal::CmpHelperNE")));
+ auto is_gtest_assertion_result_ctor = constructorDecl(ofClass(
+ recordDecl(isSameOrDerivedFrom(hasName("::testing::AssertionResult")))));
+
+ // Find all calls to an operator overload that are 'safe'.
+ //
+ // All bool conversions will be handled with the Testable trick, but that
+ // can only be used once "operator T*" is removed, since otherwise it leaves
+ // the call ambiguous.
+ GetRewriterCallback get_callback(&replacements);
+ match_finder.addMatcher(
+ memberCallExpr(
+ base_matcher,
+ // Excluded since the conversion may be unsafe.
+ unless(anyOf(is_unsafe_temporary_conversion, is_unsafe_return)),
+ // Excluded since the conversion occurs inside a helper function that
+ // the macro wraps. Letting this callback handle the rewrite would
+ // result in an incorrect replacement that changes the helper function
+ // itself. Instead, the right replacement is to rewrite the macro's
+ // arguments.
+ unless(hasAncestor(decl(anyOf(is_logging_helper,
+ is_gtest_helper,
+ is_gtest_assertion_result_ctor))))),
+ &get_callback);
+
+ // Find temporary scoped_refptr<T>'s being unsafely assigned to a T*.
+ VarRewriterCallback var_callback(&replacements);
+ auto initialized_with_temporary = ignoringImpCasts(exprWithCleanups(
+ has(memberCallExpr(base_matcher, is_unsafe_temporary_conversion))));
+ match_finder.addMatcher(id("var",
+ varDecl(hasInitializer(initialized_with_temporary),
+ hasType(pointerType()))),
+ &var_callback);
+ match_finder.addMatcher(
+ constructorDecl(forEachConstructorInitializer(
+ allOf(withInitializer(initialized_with_temporary),
+ forField(id("var", fieldDecl(hasType(pointerType()))))))),
+ &var_callback);
+
+ // Rewrite functions that unsafely turn a scoped_refptr<T> into a T* when
+ // returning a value.
+ FunctionRewriterCallback fn_callback(&replacements);
+ match_finder.addMatcher(memberCallExpr(base_matcher, is_unsafe_return),
+ &fn_callback);
+
+ // Rewrite logging / gtest expressions that result in an implicit conversion.
+ // Luckily, the matchers don't need to handle the case where one of the macro
+ // arguments is NULL, such as:
+ // CHECK_EQ(my_scoped_refptr, NULL)
+ // because it simply doesn't compile--since NULL is actually of integral type,
+ // this doesn't trigger scoped_refptr<T>'s implicit conversion. Since there is
+ // no comparison overload for scoped_refptr<T> and int, this fails to compile.
+ MacroRewriterCallback macro_callback(&replacements);
+ // CHECK_EQ/CHECK_NE helpers.
+ match_finder.addMatcher(
+ callExpr(callee(is_logging_helper),
+ argumentCountIs(3),
+ hasAnyArgument(id("expr", expr(hasType(is_scoped_refptr)))),
+ hasAnyArgument(hasType(pointerType())),
+ hasArgument(2, stringLiteral())),
+ &macro_callback);
+ // ASSERT_EQ/ASSERT_NE/EXPECT_EQ/EXPECT_NE, which use the same underlying
+ // helper functions. Even though gtest has special handling for pointer to
+ // NULL comparisons, it doesn't trigger in this case, so no special handling
+ // is needed for the replacements.
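+ // For example (from the gtest test files below), ASSERT_EQ(a, b) with a
+ // scoped_refptr<Foo> 'a' and a raw Foo* 'b' is rewritten to
+ // ASSERT_EQ(a.get(), b).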
+ match_finder.addMatcher(
+ callExpr(callee(is_gtest_helper),
+ argumentCountIs(4),
+ hasArgument(0, stringLiteral()),
+ hasArgument(1, stringLiteral()),
+ hasAnyArgument(id("expr", expr(hasType(is_scoped_refptr)))),
+ hasAnyArgument(hasType(pointerType()))),
+ &macro_callback);
+ // ASSERT_TRUE/EXPECT_TRUE helpers. Note that this matcher doesn't need to
+ // handle ASSERT_FALSE/EXPECT_FALSE, because it gets coerced to bool before
+ // being passed as an argument to AssertionResult's constructor. As a result,
+ // GetRewriterCallback handles this case properly since the conversion isn't
+ // hidden inside AssertionResult, and the generated replacement properly
+ // rewrites the macro argument.
+ // However, the tool does need to handle the _TRUE counterparts, since the
+ // conversion occurs inside the constructor in those cases.
+ match_finder.addMatcher(
+ constructExpr(
+ argumentCountIs(2),
+ hasArgument(0, id("expr", expr(hasType(is_scoped_refptr)))),
+ hasDeclaration(is_gtest_assertion_result_ctor)),
+ &macro_callback);
+
+ std::unique_ptr<clang::tooling::FrontendActionFactory> factory =
+ clang::tooling::newFrontendActionFactory(&match_finder);
+ int result = tool.run(factory.get());
+ if (result != 0)
+ return result;
+
+ // Serialization format is documented in tools/clang/scripts/run_tool.py
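+ // Each replacement is emitted as one line; a hypothetical edit could look
+ // like:
+ //   r:tests/gtest-original.cc:521:1:a.get()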
+ llvm::outs() << "==== BEGIN EDITS ====\n";
+ for (const auto& r : replacements) {
+ std::string replacement_text = r.getReplacementText().str();
+ std::replace(replacement_text.begin(), replacement_text.end(), '\n', '\0');
+ llvm::outs() << "r:" << r.getFilePath() << ":" << r.getOffset() << ":"
+ << r.getLength() << ":" << replacement_text << "\n";
+ }
+ llvm::outs() << "==== END EDITS ====\n";
+
+ return 0;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/const-scoped_refptr&-to-raw-adds-get-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/const-scoped_refptr&-to-raw-adds-get-expected.cc
new file mode 100644
index 0000000..87624c0
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/const-scoped_refptr&-to-raw-adds-get-expected.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+class Foo {
+ int dummy;
+};
+
+class Bar {
+ public:
+ const scoped_refptr<Foo>& foo() const { return foo_; }
+
+ private:
+ scoped_refptr<Foo> foo_;
+};
+
+void TestFunction() {
+ Bar b;
+ Foo* f = b.foo().get();
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/const-scoped_refptr&-to-raw-adds-get-original.cc b/tools/clang/rewrite_scoped_refptr/tests/const-scoped_refptr&-to-raw-adds-get-original.cc
new file mode 100644
index 0000000..9b799d5
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/const-scoped_refptr&-to-raw-adds-get-original.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+class Foo {
+ int dummy;
+};
+
+class Bar {
+ public:
+ const scoped_refptr<Foo>& foo() const { return foo_; }
+
+ private:
+ scoped_refptr<Foo> foo_;
+};
+
+void TestFunction() {
+ Bar b;
+ Foo* f = b.foo();
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/gtest-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/gtest-expected.cc
new file mode 100644
index 0000000..bd07572
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/gtest-expected.cc
@@ -0,0 +1,30 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/memory/ref_counted.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+struct Foo : public base::RefCounted<Foo> {
+ int dummy;
+};
+
+void TestFunction() {
+ scoped_refptr<Foo> a;
+ Foo* b;
+
+ ASSERT_EQ(a.get(), b);
+ ASSERT_EQ(b, a.get());
+ EXPECT_EQ(a.get(), b);
+ EXPECT_EQ(b, a.get());
+
+ ASSERT_NE(a.get(), b);
+ ASSERT_NE(b, a.get());
+ EXPECT_NE(a.get(), b);
+ EXPECT_NE(b, a.get());
+
+ ASSERT_TRUE(a.get());
+ ASSERT_FALSE(!a.get());
+ EXPECT_TRUE(a.get());
+ EXPECT_FALSE(!a.get());
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/gtest-original.cc b/tools/clang/rewrite_scoped_refptr/tests/gtest-original.cc
new file mode 100644
index 0000000..ac172e5
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/gtest-original.cc
@@ -0,0 +1,30 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/memory/ref_counted.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+struct Foo : public base::RefCounted<Foo> {
+ int dummy;
+};
+
+void TestFunction() {
+ scoped_refptr<Foo> a;
+ Foo* b;
+
+ ASSERT_EQ(a, b);
+ ASSERT_EQ(b, a);
+ EXPECT_EQ(a, b);
+ EXPECT_EQ(b, a);
+
+ ASSERT_NE(a, b);
+ ASSERT_NE(b, a);
+ EXPECT_NE(a, b);
+ EXPECT_NE(b, a);
+
+ ASSERT_TRUE(a);
+ ASSERT_FALSE(!a);
+ EXPECT_TRUE(a);
+ EXPECT_FALSE(!a);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/local-returned-as-raw-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/local-returned-as-raw-expected.cc
new file mode 100644
index 0000000..30b0f83
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/local-returned-as-raw-expected.cc
@@ -0,0 +1,18 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// An example of an unsafe conversion, where the object is freed by the time the
+// function returns.
+scoped_refptr<Foo> GetBuggyFoo();
+
+scoped_refptr<Foo> GetBuggyFoo() {
+ scoped_refptr<Foo> unsafe(new Foo);
+ return unsafe;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/local-returned-as-raw-original.cc b/tools/clang/rewrite_scoped_refptr/tests/local-returned-as-raw-original.cc
new file mode 100644
index 0000000..7103cab
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/local-returned-as-raw-original.cc
@@ -0,0 +1,18 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// An example of an unsafe conversion, where the object is freed by the time the
+// function returns.
+Foo* GetBuggyFoo();
+
+Foo* GetBuggyFoo() {
+ scoped_refptr<Foo> unsafe(new Foo);
+ return unsafe;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/logging-checks-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/logging-checks-expected.cc
new file mode 100644
index 0000000..000638f
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/logging-checks-expected.cc
@@ -0,0 +1,28 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/logging.h"
+#include "base/memory/ref_counted.h"
+
+struct Foo : public base::RefCounted<Foo> {
+ int dummy;
+};
+
+// Rewriting the logging macros is a bit tricky. The CHECK_OP macros actually
+// wrap a function where the actual comparison happens. Make sure that the tool
+// is correctly matching the AST nodes generated by the macros and generating
+// the appropriate replacements.
+void TestFunction() {
+ scoped_refptr<Foo> a;
+ Foo* b;
+
+ CHECK_EQ(a.get(), b);
+ CHECK_EQ(b, a.get());
+
+ CHECK_NE(a.get(), b);
+ CHECK_NE(b, a.get());
+
+ CHECK(a.get());
+ CHECK(!a.get());
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/logging-checks-original.cc b/tools/clang/rewrite_scoped_refptr/tests/logging-checks-original.cc
new file mode 100644
index 0000000..4b8442f
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/logging-checks-original.cc
@@ -0,0 +1,28 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/logging.h"
+#include "base/memory/ref_counted.h"
+
+struct Foo : public base::RefCounted<Foo> {
+ int dummy;
+};
+
+// Rewriting the logging macros is a bit tricky. The CHECK_OP macros actually
+// wrap a function where the actual comparison happens. Make sure that the tool
+// is correctly matching the AST nodes generated by the macros and generating
+// the appropriate replacements.
+void TestFunction() {
+ scoped_refptr<Foo> a;
+ Foo* b;
+
+ CHECK_EQ(a, b);
+ CHECK_EQ(b, a);
+
+ CHECK_NE(a, b);
+ CHECK_NE(b, a);
+
+ CHECK(a);
+ CHECK(!a);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/ref-to-local-returned-as-raw-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/ref-to-local-returned-as-raw-expected.cc
new file mode 100644
index 0000000..8608120
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/ref-to-local-returned-as-raw-expected.cc
@@ -0,0 +1,18 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/memory/ref_counted.h"
+
+struct Foo : public base::RefCounted<Foo> {
+ int dummy;
+};
+
+// An example of an unsafe conversion, since the reference is bound to a
+// scoped_refptr with local storage. The tool should ignore this, since it
+// should prefer letting a human manually resolve trickier cases like this.
+Foo* TestFunction() {
+ scoped_refptr<Foo> a;
+ scoped_refptr<Foo>& b = a;
+ return b;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/ref-to-local-returned-as-raw-original.cc b/tools/clang/rewrite_scoped_refptr/tests/ref-to-local-returned-as-raw-original.cc
new file mode 100644
index 0000000..8608120
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/ref-to-local-returned-as-raw-original.cc
@@ -0,0 +1,18 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/memory/ref_counted.h"
+
+struct Foo : public base::RefCounted<Foo> {
+ int dummy;
+};
+
+// An example of an unsafe conversion, since the reference is bound to a
+// scoped_refptr with local storage. The tool should ignore this, since it
+// should prefer letting a human manually resolve trickier cases like this.
+Foo* TestFunction() {
+ scoped_refptr<Foo> a;
+ scoped_refptr<Foo>& b = a;
+ return b;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/scoped_refptr.h b/tools/clang/rewrite_scoped_refptr/tests/scoped_refptr.h
new file mode 100644
index 0000000..1ebd323
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/scoped_refptr.h
@@ -0,0 +1,43 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef SCOPED_REFPTR_H_
+#define SCOPED_REFPTR_H_
+
+// Stub scoped_refptr<T> class that supports an implicit cast to T*.
+template <class T>
+class scoped_refptr {
+ public:
+ typedef T element_type;
+ scoped_refptr() : ptr_(0) {}
+ scoped_refptr(T* p) : ptr_(p) {}
+ scoped_refptr(const scoped_refptr<T>& r) : ptr_(r.ptr_) {}
+
+ template <typename U>
+ scoped_refptr(const scoped_refptr<U>& r)
+ : ptr_(r.get()) {}
+
+ ~scoped_refptr() {}
+
+ T* get() const { return ptr_; }
+ operator T*() const { return ptr_; }
+ T* operator->() const { return ptr_; }
+
+ scoped_refptr<T>& operator=(T* p) {
+ ptr_ = p;
+ return *this;
+ }
+ scoped_refptr<T>& operator=(const scoped_refptr<T>& r) {
+ return *this = r.ptr_;
+ }
+ template <typename U>
+ scoped_refptr<T>& operator=(const scoped_refptr<U>& r) {
+ return *this = r.get();
+ }
+
+ protected:
+ T* ptr_;
+};
+
+#endif // SCOPED_REFPTR_H_
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-field-init-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-field-init-expected.cc
new file mode 100644
index 0000000..1067dba
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-field-init-expected.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// Similar to case 2, but with a field initializer.
+scoped_refptr<Foo> GetBuggyFoo() {
+ return new Foo;
+}
+
+class ABuggyCtor {
+ ABuggyCtor() : f_(GetBuggyFoo()) {}
+ scoped_refptr<Foo> f_;
+};
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-field-init-original.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-field-init-original.cc
new file mode 100644
index 0000000..c81503c
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-field-init-original.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// Similar to case 2, but with a field initializer.
+scoped_refptr<Foo> GetBuggyFoo() {
+ return new Foo;
+}
+
+class ABuggyCtor {
+ ABuggyCtor() : f_(GetBuggyFoo()) {}
+ Foo* f_;
+};
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-raw-var-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-raw-var-expected.cc
new file mode 100644
index 0000000..b7b02ad
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-raw-var-expected.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// Case 2: An example of an unsafe conversion, where the scoped_refptr<> is
+// returned as a temporary, and as such both it and its object are only valid
+// for the duration of the full expression.
+scoped_refptr<Foo> GetBuggyFoo() {
+ return new Foo;
+}
+void UseBuggyFoo() {
+ scoped_refptr<Foo> unsafe = GetBuggyFoo();
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-raw-var-original.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-raw-var-original.cc
new file mode 100644
index 0000000..dd6e0ee
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-assigned-to-raw-var-original.cc
@@ -0,0 +1,19 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// Case 2: An example of an unsafe conversion, where the scoped_refptr<> is
+// returned as a temporary, and as such both it and its object are only valid
+// for the duration of the full expression.
+scoped_refptr<Foo> GetBuggyFoo() {
+ return new Foo;
+}
+void UseBuggyFoo() {
+ Foo* unsafe = GetBuggyFoo();
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-bool-test-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-bool-test-expected.cc
new file mode 100644
index 0000000..815d78a
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-bool-test-expected.cc
@@ -0,0 +1,20 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// A temporary scoped_refptr<T> is used in a boolean test. This doesn't result
+// in memory safety issues, but probably indicates a code smell. As such, the
+// tool intentionally skips this case so it can be manually handled.
+scoped_refptr<Foo> GetBuggyFoo() {
+ return new Foo;
+}
+void UseBuggyFoo() {
+ if (GetBuggyFoo())
+ return;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-bool-test-original.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-bool-test-original.cc
new file mode 100644
index 0000000..815d78a
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-bool-test-original.cc
@@ -0,0 +1,20 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// A temporary scoped_refptr<T> is used in a boolean test. This doesn't result
+// in memory safety issues, but probably indicates a code smell. As such, the
+// tool intentionally skips this case so it can be manually handled.
+scoped_refptr<Foo> GetBuggyFoo() {
+ return new Foo;
+}
+void UseBuggyFoo() {
+ if (GetBuggyFoo())
+ return;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-passed-as-raw-arg-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-passed-as-raw-arg-expected.cc
new file mode 100644
index 0000000..00435f0
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-passed-as-raw-arg-expected.cc
@@ -0,0 +1,24 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// A temporary scoped_refptr is passed as a raw pointer function argument. Since
+// temporaries are destroyed at the end of the full expression, this is 'safe'
+// and could be rewritten to use get(). However, the tool just skips this case.
+// This should be rare enough that manual intervention is sufficient, since
+// seeing this pattern probably indicates a code smell.
+scoped_refptr<Foo> GetBuggyFoo() {
+ return new Foo;
+}
+
+void Bar(Foo* f);
+
+void UseBuggyFoo() {
+ Bar(GetBuggyFoo());
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-passed-as-raw-arg-original.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-passed-as-raw-arg-original.cc
new file mode 100644
index 0000000..00435f0
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-passed-as-raw-arg-original.cc
@@ -0,0 +1,24 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+// A temporary scoped_refptr is passed as a raw pointer function argument. Since
+// temporaries are destroyed at the end of the full expression, this is 'safe'
+// and could be rewritten to use get(). However, the tool just skips this case.
+// This should be rare enough that manual intervention is sufficient, since
+// seeing this pattern probably indicates a code smell.
+scoped_refptr<Foo> GetBuggyFoo() {
+ return new Foo;
+}
+
+void Bar(Foo* f);
+
+void UseBuggyFoo() {
+ Bar(GetBuggyFoo());
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-returned-as-raw-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-returned-as-raw-expected.cc
new file mode 100644
index 0000000..1987bbb
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-returned-as-raw-expected.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/memory/ref_counted.h"
+
+struct Foo : public base::RefCounted<Foo> {
+ int dummy;
+};
+
+class Bar {
+ scoped_refptr<Foo> TestFunction();
+};
+
+scoped_refptr<Foo> CreateFoo();
+
+// An example of an unsafe conversion--the scoped_refptr will be destroyed by
+// the time the function returns, since it's a temporary, so the returned raw
+// pointer may point to a deleted object.
+scoped_refptr<Foo> Bar::TestFunction() {
+ return CreateFoo();
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/temp-returned-as-raw-original.cc b/tools/clang/rewrite_scoped_refptr/tests/temp-returned-as-raw-original.cc
new file mode 100644
index 0000000..e0fd791
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/temp-returned-as-raw-original.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/memory/ref_counted.h"
+
+struct Foo : public base::RefCounted<Foo> {
+ int dummy;
+};
+
+class Bar {
+ Foo* TestFunction();
+};
+
+scoped_refptr<Foo> CreateFoo();
+
+// An example of an unsafe conversion--the scoped_refptr will be destroyed by
+// the time the function returns, since it's a temporary, so the returned raw
+// pointer may point to a deleted object.
+Foo* Bar::TestFunction() {
+ return CreateFoo();
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test10-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test10-expected.cc
new file mode 100644
index 0000000..a74cd7d
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test10-expected.cc
@@ -0,0 +1,16 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+int TestsAScopedRefptr() {
+ scoped_refptr<Foo> foo(new Foo);
+ if (foo.get())
+ return 1;
+ return 0;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test10-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test10-original.cc
new file mode 100644
index 0000000..24ecd74
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test10-original.cc
@@ -0,0 +1,16 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+int TestsAScopedRefptr() {
+ scoped_refptr<Foo> foo(new Foo);
+ if (foo)
+ return 1;
+ return 0;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test11-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test11-expected.cc
new file mode 100644
index 0000000..4557b52
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test11-expected.cc
@@ -0,0 +1,24 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <vector>
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+typedef std::vector<scoped_refptr<Foo> > FooList;
+
+void TestsAScopedRefptr() {
+ FooList list;
+ list.push_back(new Foo);
+ list.push_back(new Foo);
+ for (FooList::const_iterator it = list.begin(); it != list.end(); ++it) {
+ if (!it->get())
+ continue;
+ Foo* item = it->get();
+ }
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test11-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test11-original.cc
new file mode 100644
index 0000000..c79148b
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test11-original.cc
@@ -0,0 +1,24 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <vector>
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+typedef std::vector<scoped_refptr<Foo> > FooList;
+
+void TestsAScopedRefptr() {
+ FooList list;
+ list.push_back(new Foo);
+ list.push_back(new Foo);
+ for (FooList::const_iterator it = list.begin(); it != list.end(); ++it) {
+ if (!*it)
+ continue;
+ Foo* item = *it;
+ }
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test12-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test12-expected.cc
new file mode 100644
index 0000000..fdaa80e
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test12-expected.cc
@@ -0,0 +1,46 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iterator>
+#include <map>
+#include <string>
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+typedef std::map<std::string, scoped_refptr<const Foo> > MyMap;
+
+class MyIter
+ : public std::iterator<std::input_iterator_tag, scoped_refptr<const Foo> > {
+ public:
+ MyIter() {}
+ MyIter(const MyIter& other) : it_(other.it_) {}
+ explicit MyIter(MyMap::const_iterator it) : it_(it) {}
+ MyIter& operator++() {
+ ++it_;
+ return *this;
+ }
+ const scoped_refptr<const Foo> operator*() { return it_->second; }
+ bool operator!=(const MyIter& other) { return it_ != other.it_; }
+ bool operator==(const MyIter& other) { return it_ == other.it_; }
+
+ private:
+ MyMap::const_iterator it_;
+};
+
+void TestsAScopedRefptr() {
+ MyMap map;
+ map["foo"] = new Foo;
+ map["bar"] = new Foo;
+ MyIter my_begin(map.begin());
+ MyIter my_end(map.end());
+ for (MyIter it = my_begin; it != my_end; ++it) {
+ const Foo* item = NULL;
+ if (it->get())
+ item = it->get();
+ }
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test12-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test12-original.cc
new file mode 100644
index 0000000..33f1eb1
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test12-original.cc
@@ -0,0 +1,46 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iterator>
+#include <map>
+#include <string>
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+typedef std::map<std::string, scoped_refptr<const Foo> > MyMap;
+
+class MyIter
+ : public std::iterator<std::input_iterator_tag, scoped_refptr<const Foo> > {
+ public:
+ MyIter() {}
+ MyIter(const MyIter& other) : it_(other.it_) {}
+ explicit MyIter(MyMap::const_iterator it) : it_(it) {}
+ MyIter& operator++() {
+ ++it_;
+ return *this;
+ }
+ const scoped_refptr<const Foo> operator*() { return it_->second; }
+ bool operator!=(const MyIter& other) { return it_ != other.it_; }
+ bool operator==(const MyIter& other) { return it_ == other.it_; }
+
+ private:
+ MyMap::const_iterator it_;
+};
+
+void TestsAScopedRefptr() {
+ MyMap map;
+ map["foo"] = new Foo;
+ map["bar"] = new Foo;
+ MyIter my_begin(map.begin());
+ MyIter my_end(map.end());
+ for (MyIter it = my_begin; it != my_end; ++it) {
+ const Foo* item = NULL;
+ if (*it)
+ item = *it;
+ }
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test3-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test3-expected.cc
new file mode 100644
index 0000000..9a0bf60
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test3-expected.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+void ExpectsScopedRefptr(const scoped_refptr<Foo>& param) {
+ Foo* foo = param.get();
+}
+
+void CallExpectsScopedRefptr() {
+ scoped_refptr<Foo> temp(new Foo);
+ ExpectsScopedRefptr(temp);
+}
+
+void CallExpectsScopedRefptrWithRawPtr() {
+ ExpectsScopedRefptr(new Foo);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test3-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test3-original.cc
new file mode 100644
index 0000000..eb40952
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test3-original.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+void ExpectsScopedRefptr(const scoped_refptr<Foo>& param) {
+ Foo* foo = param;
+}
+
+void CallExpectsScopedRefptr() {
+ scoped_refptr<Foo> temp(new Foo);
+ ExpectsScopedRefptr(temp);
+}
+
+void CallExpectsScopedRefptrWithRawPtr() {
+ ExpectsScopedRefptr(new Foo);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test4-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test4-expected.cc
new file mode 100644
index 0000000..1fc61bc
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test4-expected.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+void ExpectsRawPtr(Foo* foo) {
+ Foo* temp = foo;
+}
+
+void CallExpectsRawPtrWithScopedRefptr() {
+ scoped_refptr<Foo> ok(new Foo);
+ ExpectsRawPtr(ok.get());
+}
+
+void CallExpectsRawPtrWithRawPtr() {
+ ExpectsRawPtr(new Foo);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test4-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test4-original.cc
new file mode 100644
index 0000000..3395b10
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test4-original.cc
@@ -0,0 +1,22 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+void ExpectsRawPtr(Foo* foo) {
+ Foo* temp = foo;
+}
+
+void CallExpectsRawPtrWithScopedRefptr() {
+ scoped_refptr<Foo> ok(new Foo);
+ ExpectsRawPtr(ok);
+}
+
+void CallExpectsRawPtrWithRawPtr() {
+ ExpectsRawPtr(new Foo);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test5-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test5-expected.cc
new file mode 100644
index 0000000..d9d3924
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test5-expected.cc
@@ -0,0 +1,23 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+struct Bar : public Foo {
+ int another_dummy;
+};
+
+// Ensure that the correct cast (the user-defined cast) is converted.
+void ExpectsRawFooPtr(Foo* foo) {
+ Foo* temp = foo;
+}
+
+void CallExpectsRawFooPtrWithBar() {
+ scoped_refptr<Bar> temp(new Bar);
+ ExpectsRawFooPtr(temp.get());
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test5-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test5-original.cc
new file mode 100644
index 0000000..eb87a07
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test5-original.cc
@@ -0,0 +1,23 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+struct Bar : public Foo {
+ int another_dummy;
+};
+
+// Ensure that the correct cast (the user-defined cast) is converted.
+void ExpectsRawFooPtr(Foo* foo) {
+ Foo* temp = foo;
+}
+
+void CallExpectsRawFooPtrWithBar() {
+ scoped_refptr<Bar> temp(new Bar);
+ ExpectsRawFooPtr(temp);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test6-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test6-expected.cc
new file mode 100644
index 0000000..00bbcda
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test6-expected.cc
@@ -0,0 +1,24 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+struct Bar : public Foo {
+ int another_dummy;
+};
+
+// Ensure that scoped_refptr<A> -> scoped_refptr<B> conversions are not
+// rewritten.
+void ExpectsScopedPtr(const scoped_refptr<Foo>& foo) {
+ scoped_refptr<Foo> temp(foo);
+}
+
+void CallExpectsScopedPtrWithBar() {
+ scoped_refptr<Bar> temp(new Bar);
+ ExpectsScopedPtr(temp);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test6-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test6-original.cc
new file mode 100644
index 0000000..00bbcda
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test6-original.cc
@@ -0,0 +1,24 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+struct Bar : public Foo {
+ int another_dummy;
+};
+
+// Ensure that scoped_refptr<A> -> scoped_refptr<B> conversions are not
+// rewritten.
+void ExpectsScopedPtr(const scoped_refptr<Foo>& foo) {
+ scoped_refptr<Foo> temp(foo);
+}
+
+void CallExpectsScopedPtrWithBar() {
+ scoped_refptr<Bar> temp(new Bar);
+ ExpectsScopedPtr(temp);
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test7-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test7-expected.cc
new file mode 100644
index 0000000..31b15ca
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test7-expected.cc
@@ -0,0 +1,21 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+void ExpectsRawPtr(Foo* foo) {
+ Foo* temp = foo;
+}
+
+// Ensure that dereferences of scoped_refptr<>s are properly rewritten as
+// ->get() calls.
+Foo* GetHeapFoo() {
+ scoped_refptr<Foo>* heap_allocated = new scoped_refptr<Foo>();
+ *heap_allocated = new Foo;
+ return heap_allocated->get();
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test7-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test7-original.cc
new file mode 100644
index 0000000..faad6d6
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test7-original.cc
@@ -0,0 +1,21 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+void ExpectsRawPtr(Foo* foo) {
+ Foo* temp = foo;
+}
+
+// Ensure that dereferences of scoped_refptr<>s are properly rewritten as
+// ->get() calls.
+Foo* GetHeapFoo() {
+ scoped_refptr<Foo>* heap_allocated = new scoped_refptr<Foo>();
+ *heap_allocated = new Foo;
+ return *heap_allocated;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test8-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test8-expected.cc
new file mode 100644
index 0000000..a7a2119
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test8-expected.cc
@@ -0,0 +1,26 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+struct Bar : public Foo {
+ int another_dummy;
+};
+
+void ExpectsRawPtr(Foo* foo) {
+ Foo* temp = foo;
+}
+
+// Ensure that dereferences of scoped_refptr<>s are properly rewritten as
+// ->get() calls, and that the correct conversion is rewritten (e.g. not the
+// Bar* -> Foo* conversion).
+Foo* GetHeapFoo() {
+ scoped_refptr<Bar>* heap_allocated = new scoped_refptr<Bar>();
+ *heap_allocated = new Bar;
+ return heap_allocated->get();
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test8-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test8-original.cc
new file mode 100644
index 0000000..e485ff0
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test8-original.cc
@@ -0,0 +1,26 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+struct Bar : public Foo {
+ int another_dummy;
+};
+
+void ExpectsRawPtr(Foo* foo) {
+ Foo* temp = foo;
+}
+
+// Ensure that dereferences of scoped_refptr<>s are properly rewritten as
+// ->get() calls, and that the correct conversion is rewritten (e.g. not the
+// Bar* -> Foo* conversion).
+Foo* GetHeapFoo() {
+ scoped_refptr<Bar>* heap_allocated = new scoped_refptr<Bar>();
+ *heap_allocated = new Bar;
+ return *heap_allocated;
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test9-expected.cc b/tools/clang/rewrite_scoped_refptr/tests/test9-expected.cc
new file mode 100644
index 0000000..614aff9
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test9-expected.cc
@@ -0,0 +1,45 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+struct HasAScopedRefptr {
+ scoped_refptr<Foo> member;
+
+ const scoped_refptr<Foo>& GetMemberAsScopedRefptr() const { return member; }
+
+ Foo* GetMemberAsRawPtr() const { return member.get(); }
+};
+
+void ExpectsRawPtr(Foo* param) {
+ Foo* temp = param;
+}
+
+void ExpectsScopedRefptr(const scoped_refptr<Foo>& param) {
+ Foo* temp = param.get();
+}
+
+void CallsRawWithMemberScopedRefptr() {
+ HasAScopedRefptr object;
+ ExpectsRawPtr(object.GetMemberAsScopedRefptr().get());
+}
+
+void CallsRawWithMemberRawPtr() {
+ HasAScopedRefptr object;
+ ExpectsRawPtr(object.GetMemberAsRawPtr());
+}
+
+void CallsScopedWithMemberScopedRefptr() {
+ HasAScopedRefptr object;
+ ExpectsScopedRefptr(object.GetMemberAsScopedRefptr());
+}
+
+void CallsScopedWithMemberRawPtr() {
+ HasAScopedRefptr object;
+ ExpectsScopedRefptr(object.GetMemberAsScopedRefptr());
+}
diff --git a/tools/clang/rewrite_scoped_refptr/tests/test9-original.cc b/tools/clang/rewrite_scoped_refptr/tests/test9-original.cc
new file mode 100644
index 0000000..e59ed6e
--- /dev/null
+++ b/tools/clang/rewrite_scoped_refptr/tests/test9-original.cc
@@ -0,0 +1,45 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "scoped_refptr.h"
+
+struct Foo {
+ int dummy;
+};
+
+struct HasAScopedRefptr {
+ scoped_refptr<Foo> member;
+
+ const scoped_refptr<Foo>& GetMemberAsScopedRefptr() const { return member; }
+
+ Foo* GetMemberAsRawPtr() const { return member; }
+};
+
+void ExpectsRawPtr(Foo* param) {
+ Foo* temp = param;
+}
+
+void ExpectsScopedRefptr(const scoped_refptr<Foo>& param) {
+ Foo* temp = param.get();
+}
+
+void CallsRawWithMemberScopedRefptr() {
+ HasAScopedRefptr object;
+ ExpectsRawPtr(object.GetMemberAsScopedRefptr());
+}
+
+void CallsRawWithMemberRawPtr() {
+ HasAScopedRefptr object;
+ ExpectsRawPtr(object.GetMemberAsRawPtr());
+}
+
+void CallsScopedWithMemberScopedRefptr() {
+ HasAScopedRefptr object;
+ ExpectsScopedRefptr(object.GetMemberAsScopedRefptr());
+}
+
+void CallsScopedWithMemberRawPtr() {
+ HasAScopedRefptr object;
+ ExpectsScopedRefptr(object.GetMemberAsScopedRefptr());
+}
diff --git a/tools/clang/scripts/blink_gc_plugin_flags.sh b/tools/clang/scripts/blink_gc_plugin_flags.sh
new file mode 100755
index 0000000..3654808
--- /dev/null
+++ b/tools/clang/scripts/blink_gc_plugin_flags.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script returns the flags that should be passed to clang.
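+#
+# Illustrative invocation (the argument values are examples, not requirements):
+#   ./blink_gc_plugin_flags.sh enable-oilpan=1 warn-raw-ptr=1
+# prints the -Xclang flags that load the Blink GC plugin library and forward
+# the recognized plugin arguments to it.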
+
+SRC_DIR=$(cd $(dirname $0)/../../.. && echo $PWD)
+CLANG_LIB_PATH=$SRC_DIR/third_party/llvm-build/Release+Asserts/lib
+
+if uname -s | grep -q Darwin; then
+ LIBSUFFIX=dylib
+else
+ LIBSUFFIX=so
+fi
+LIBNAME=\
+$(grep 'set(LIBRARYNAME' "$SRC_DIR"/tools/clang/blink_gc_plugin/CMakeLists.txt \
+ | cut -d ' ' -f 2 | tr -d ')')
+
+FLAGS=""
+PREFIX="-Xclang -plugin-arg-blink-gc-plugin -Xclang"
+for arg in "$@"; do
+ if [[ "$arg" = "enable-oilpan=1" ]]; then
+ FLAGS="$FLAGS $PREFIX enable-oilpan"
+ elif [[ "$arg" = "dump-graph=1" ]]; then
+ FLAGS="$FLAGS $PREFIX dump-graph"
+ elif [[ "$arg" = "warn-raw-ptr=1" ]]; then
+ FLAGS="$FLAGS $PREFIX warn-raw-ptr"
+ elif [[ "$arg" = "warn-unneeded-finalizer=1" ]]; then
+ FLAGS="$FLAGS $PREFIX warn-unneeded-finalizer"
+ fi
+done
+
+echo -Xclang -load -Xclang $CLANG_LIB_PATH/lib$LIBNAME.$LIBSUFFIX \
+ -Xclang -add-plugin -Xclang blink-gc-plugin $FLAGS
diff --git a/tools/clang/scripts/package.sh b/tools/clang/scripts/package.sh
new file mode 100755
index 0000000..afa43bd
--- /dev/null
+++ b/tools/clang/scripts/package.sh
@@ -0,0 +1,183 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script will check out llvm and clang, and then package the results up
+# to a tgz file.
+
+gcc_toolchain=
+
+# Parse command line options.
+while [[ $# > 0 ]]; do
+ case $1 in
+ --gcc-toolchain)
+ shift
+ if [[ $# == 0 ]]; then
+ echo "--gcc-toolchain requires an argument."
+ exit 1
+ fi
+ if [[ -x "$1/bin/gcc" ]]; then
+ gcc_toolchain=$1
+ else
+ echo "Invalid --gcc-toolchain: '$1'."
+ echo "'$1/bin/gcc' does not appear to be valid."
+ exit 1
+ fi
+ ;;
+
+ --help)
+ echo "usage: $0 [--gcc-toolchain <prefix>]"
+ echo
+ echo "--gcc-toolchain: Set the prefix for which GCC version should"
+ echo " be used for building. For example, to use gcc in"
+      echo "                 /opt/foo/bin/gcc, use '--gcc-toolchain /opt/foo'"
+ echo
+ exit 1
+ ;;
+ *)
+ echo "Unknown argument: '$1'."
+ echo "Use --help for help."
+ exit 1
+ ;;
+ esac
+ shift
+done
+
+
+THIS_DIR="$(dirname "${0}")"
+LLVM_DIR="${THIS_DIR}/../../../third_party/llvm"
+LLVM_BOOTSTRAP_DIR="${THIS_DIR}/../../../third_party/llvm-bootstrap"
+LLVM_BOOTSTRAP_INSTALL_DIR="${LLVM_DIR}/../llvm-bootstrap-install"
+LLVM_BUILD_DIR="${THIS_DIR}/../../../third_party/llvm-build"
+LLVM_BIN_DIR="${LLVM_BUILD_DIR}/Release+Asserts/bin"
+LLVM_LIB_DIR="${LLVM_BUILD_DIR}/Release+Asserts/lib"
+
+echo "Diff in llvm:" | tee buildlog.txt
+svn stat "${LLVM_DIR}" 2>&1 | tee -a buildlog.txt
+svn diff "${LLVM_DIR}" 2>&1 | tee -a buildlog.txt
+echo "Diff in llvm/tools/clang:" | tee -a buildlog.txt
+svn stat "${LLVM_DIR}/tools/clang" 2>&1 | tee -a buildlog.txt
+svn diff "${LLVM_DIR}/tools/clang" 2>&1 | tee -a buildlog.txt
+echo "Diff in llvm/compiler-rt:" | tee -a buildlog.txt
+svn stat "${LLVM_DIR}/compiler-rt" 2>&1 | tee -a buildlog.txt
+svn diff "${LLVM_DIR}/compiler-rt" 2>&1 | tee -a buildlog.txt
+echo "Diff in llvm/projects/libcxx:" | tee -a buildlog.txt
+svn stat "${LLVM_DIR}/projects/libcxx" 2>&1 | tee -a buildlog.txt
+svn diff "${LLVM_DIR}/projects/libcxx" 2>&1 | tee -a buildlog.txt
+echo "Diff in llvm/projects/libcxxabi:" | tee -a buildlog.txt
+svn stat "${LLVM_DIR}/projects/libcxxabi" 2>&1 | tee -a buildlog.txt
+svn diff "${LLVM_DIR}/projects/libcxxabi" 2>&1 | tee -a buildlog.txt
+
+
+echo "Starting build" | tee -a buildlog.txt
+
+set -exu
+
+# Do a clobber build.
+rm -rf "${LLVM_BOOTSTRAP_DIR}"
+rm -rf "${LLVM_BOOTSTRAP_INSTALL_DIR}"
+rm -rf "${LLVM_BUILD_DIR}"
+extra_flags=
+if [[ -n "${gcc_toolchain}" ]]; then
+ extra_flags="--gcc-toolchain ${gcc_toolchain}"
+fi
+"${THIS_DIR}"/update.sh --bootstrap --force-local-build --run-tests \
+ ${extra_flags} 2>&1 | tee -a buildlog.txt
+
+R=$("${LLVM_BIN_DIR}/clang" --version | \
+ sed -ne 's/clang version .*(\([0-9]*\))/\1/p')
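+# (For illustration only: a version line shaped like "clang version 3.6.0
+# (217949)" would make R=217949; the exact version string is an assumption.)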
+
+PDIR=clang-$R
+rm -rf $PDIR
+mkdir $PDIR
+mkdir $PDIR/bin
+mkdir $PDIR/lib
+
+if [ "$(uname -s)" = "Darwin" ]; then
+ SO_EXT="dylib"
+else
+ SO_EXT="so"
+fi
+
+# Copy buildlog over.
+cp buildlog.txt $PDIR/
+
+# Copy clang into pdir, symlink clang++ to it.
+cp "${LLVM_BIN_DIR}/clang" $PDIR/bin/
+(cd $PDIR/bin && ln -sf clang clang++)
+cp "${LLVM_BIN_DIR}/llvm-symbolizer" $PDIR/bin/
+if [ "$(uname -s)" = "Darwin" ]; then
+ cp "${LLVM_BIN_DIR}/libc++.1.${SO_EXT}" $PDIR/bin/
+ (cd $PDIR/bin && ln -sf libc++.1.dylib libc++.dylib)
+fi
+
+# Copy libc++ headers.
+if [ "$(uname -s)" = "Darwin" ]; then
+ mkdir $PDIR/include
+ cp -R "${LLVM_BOOTSTRAP_INSTALL_DIR}/include/c++" $PDIR/include
+fi
+
+# Copy plugins. Some of the dylibs are pretty big, so copy only the ones we
+# care about.
+cp "${LLVM_LIB_DIR}/libFindBadConstructs.${SO_EXT}" $PDIR/lib
+
+BLINKGCPLUGIN_LIBNAME=\
+$(grep 'set(LIBRARYNAME' "$THIS_DIR"/../blink_gc_plugin/CMakeLists.txt \
+ | cut -d ' ' -f 2 | tr -d ')')
+cp "${LLVM_LIB_DIR}/lib${BLINKGCPLUGIN_LIBNAME}.${SO_EXT}" $PDIR/lib
+
+if [[ -n "${gcc_toolchain}" ]]; then
+  # Copy the libstdc++.so.6 we linked Clang against so it can run.
+ cp "${LLVM_LIB_DIR}/libstdc++.so.6" $PDIR/lib
+fi
+
+# Copy built-in headers (lib/clang/3.x.y/include).
+# compiler-rt builds all kinds of libraries, but we want only some.
+if [ "$(uname -s)" = "Darwin" ]; then
+ # Keep only the OSX (ASan and profile) and iossim (ASan) runtime libraries:
+ # Release+Asserts/lib/clang/*/lib/darwin/libclang_rt.{asan,profile}_*
+ find "${LLVM_LIB_DIR}/clang" -type f -path '*lib/darwin*' \
+ ! -name '*asan_osx*' ! -name '*asan_iossim*' ! -name '*profile_osx*' | \
+ xargs rm
+ # Fix LC_ID_DYLIB for the ASan dynamic libraries to be relative to
+ # @executable_path.
+ # TODO(glider): this is transitional. We'll need to fix the dylib name
+ # either in our build system, or in Clang. See also http://crbug.com/344836.
+ ASAN_DYLIB_NAMES="libclang_rt.asan_osx_dynamic.dylib
+ libclang_rt.asan_iossim_dynamic.dylib"
+ for ASAN_DYLIB_NAME in $ASAN_DYLIB_NAMES
+ do
+ ASAN_DYLIB=$(find "${LLVM_LIB_DIR}/clang" \
+ -type f -path "*${ASAN_DYLIB_NAME}")
+ install_name_tool -id @executable_path/${ASAN_DYLIB_NAME} "${ASAN_DYLIB}"
+ strip -x "${ASAN_DYLIB}"
+ done
+else
+ # Keep only
+ # Release+Asserts/lib/clang/*/lib/linux/libclang_rt.{[atm]san,san,ubsan,profile}-*.a
+ # , but not dfsan.
+ find "${LLVM_LIB_DIR}/clang" -type f -path '*lib/linux*' \
+ ! -name '*[atm]san*' ! -name '*ubsan*' ! -name '*libclang_rt.san*' \
+ ! -name '*profile*' | xargs rm -v
+ # Strip the debug info from the runtime libraries.
+ find "${LLVM_LIB_DIR}/clang" -type f -path '*lib/linux*' ! -name '*.syms' | xargs strip -g
+fi
+
+cp -vR "${LLVM_LIB_DIR}/clang" $PDIR/lib
+
+if [ "$(uname -s)" = "Darwin" ]; then
+ tar zcf $PDIR.tgz -C $PDIR bin include lib buildlog.txt
+else
+ tar zcf $PDIR.tgz -C $PDIR bin lib buildlog.txt
+fi
+
+if [ "$(uname -s)" = "Darwin" ]; then
+ PLATFORM=Mac
+else
+ PLATFORM=Linux_x64
+fi
+
+echo To upload, run:
+echo gsutil cp -a public-read $PDIR.tgz \
+ gs://chromium-browser-clang/$PLATFORM/$PDIR.tgz
diff --git a/tools/clang/scripts/plugin_flags.sh b/tools/clang/scripts/plugin_flags.sh
new file mode 100755
index 0000000..41c25c8
--- /dev/null
+++ b/tools/clang/scripts/plugin_flags.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script returns the flags that should be used when GYP_DEFINES contains
+# clang_use_chrome_plugins. The flags are stored in a script so that they can
+# be changed on the bots without requiring a master restart.
+
+SRC_ABS_DIR=$(cd $(dirname $0)/../../.. && echo $PWD)
+CLANG_LIB_PATH=$SRC_ABS_DIR/third_party/llvm-build/Release+Asserts/lib
+
+if uname -s | grep -q Darwin; then
+ LIBSUFFIX=dylib
+else
+ LIBSUFFIX=so
+fi
+
+echo -Xclang -load -Xclang $CLANG_LIB_PATH/libFindBadConstructs.$LIBSUFFIX \
+ -Xclang -add-plugin -Xclang find-bad-constructs
diff --git a/tools/clang/scripts/posix-print-revision.py b/tools/clang/scripts/posix-print-revision.py
new file mode 100644
index 0000000..5ac5277
--- /dev/null
+++ b/tools/clang/scripts/posix-print-revision.py
@@ -0,0 +1,11 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+# GN only supports shelling to python. Until update.py is used on all
+# platforms (currently only Windows), wrap update.sh.
+sys.exit(os.system(os.path.join(os.path.dirname(__file__), 'update.sh') +
+ ' --print-revision'))
diff --git a/tools/clang/scripts/repackage.sh b/tools/clang/scripts/repackage.sh
new file mode 100755
index 0000000..c92447a
--- /dev/null
+++ b/tools/clang/scripts/repackage.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script will check out llvm and clang, build a full package
+# with the latest plugin revisions and then repackage an existing
+# clang-package with the new plugin revisions.
+
+# The new package can be uploaded to replace the existing clang
+# package at the same clang revision.
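+#
+# Sketch of the flow (the argument shown is illustrative; all arguments are
+# simply forwarded to package.sh):
+#   ./repackage.sh --gcc-toolchain /opt/foo
+# builds clang-<rev>.tgz via package.sh, re-fetches the current prebuilt
+# package with update.sh, copies the newly built plugin library into it, and
+# writes clang-<rev>_repack.tgz for upload.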
+
+THIS_DIR="$(dirname "${0}")"
+LLVM_BUILD_DIR="${THIS_DIR}/../../../third_party/llvm-build"
+LLVM_TAR_DIR="${LLVM_BUILD_DIR}/Release+Asserts"
+LLVM_BIN_DIR="${LLVM_TAR_DIR}/bin"
+LLVM_LIB_DIR="${LLVM_TAR_DIR}/lib"
+
+set -eu
+
+if [ "$(uname -s)" = "Darwin" ]; then
+ PLATFORM=Mac
+ SO_EXT="dylib"
+else
+ PLATFORM=Linux_x64
+ SO_EXT="so"
+fi
+
+# Build clang with the new plugin revisions.
+"$THIS_DIR"/package.sh $@
+
+R=$("${LLVM_BIN_DIR}/clang" --version | \
+ sed -ne 's/clang version .*(\([0-9]*\))/\1/p')
+PDIR=clang-$R
+
+if [ ! -f "$PDIR.tgz" ]; then
+ echo "Could not find package archive $PDIR.tgz generated by package.sh"
+ exit 1
+fi
+
+# We don't want to change the clang binary, so fetch the current clang
+# package and add the plugin shared libraries to the existing package.
+rm -rf $LLVM_BUILD_DIR
+"$THIS_DIR"/update.sh
+
+LIBNAME=\
+$(grep 'set(LIBRARYNAME' "$THIS_DIR"/../blink_gc_plugin/CMakeLists.txt \
+ | cut -d ' ' -f 2 | tr -d ')')
+LIBFILE=lib$LIBNAME.$SO_EXT
+
+# Check that we are actually creating the plugin at a new revision.
+if [ -f "$LLVM_LIB_DIR/$LIBFILE" ]; then
+ echo "The plugin revision $LIBNAME is already in the existing package."
+ exit 1
+fi
+
+cp $PDIR/lib/$LIBFILE "$LLVM_LIB_DIR/"
+if [ "$(uname -s)" = "Darwin" ]; then
+ tar zcf ${PDIR}_repack.tgz -C "$LLVM_TAR_DIR" bin include lib buildlog.txt
+else
+ tar zcf ${PDIR}_repack.tgz -C "$LLVM_TAR_DIR" bin lib buildlog.txt
+fi
+
+echo The clang package has been repackaged with $LIBNAME
+echo To upload, run:
+echo gsutil cp -a public-read ${PDIR}_repack.tgz \
+ gs://chromium-browser-clang/$PLATFORM/$PDIR.tgz
diff --git a/tools/clang/scripts/run_tool.py b/tools/clang/scripts/run_tool.py
new file mode 100755
index 0000000..33bb462
--- /dev/null
+++ b/tools/clang/scripts/run_tool.py
@@ -0,0 +1,308 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper script to help run clang tools across Chromium code.
+
+How to use this tool:
+If you want to run the tool across all Chromium code:
+run_tool.py <tool> <path/to/compiledb>
+
+If you want to run the tool across just chrome/browser and content/browser:
+run_tool.py <tool> <path/to/compiledb> chrome/browser content/browser
+
+Please see https://code.google.com/p/chromium/wiki/ClangToolRefactoring for more
+information, which documents the entire automated refactoring flow in Chromium.
+
+Why use this tool:
+The clang tool implementation doesn't take advantage of multiple cores, and if
+it fails mysteriously in the middle, all the generated replacements will be
+lost.
+
+Unfortunately, if the work is simply sharded across multiple cores by running
+multiple RefactoringTools, problems arise when they attempt to rewrite a file at
+the same time. To work around that, clang tools that are run using this tool
+should output edits to stdout in the following format:
+
+==== BEGIN EDITS ====
+r:<file path>:<offset>:<length>:<replacement text>
+r:<file path>:<offset>:<length>:<replacement text>
+...etc...
+==== END EDITS ====
+
+Any generated edits are applied once the clang tool has finished running
+across Chromium, regardless of whether some instances failed or not.
+"""
+
+import collections
+import functools
+import multiprocessing
+import os.path
+import pipes
+import subprocess
+import sys
+
+
+Edit = collections.namedtuple(
+ 'Edit', ('edit_type', 'offset', 'length', 'replacement'))
+
+
+def _GetFilesFromGit(paths = None):
+ """Gets the list of files in the git repository.
+
+ Args:
+ paths: Prefix filter for the returned paths. May contain multiple entries.
+ """
+ args = ['git', 'ls-files']
+ if paths:
+ args.extend(paths)
+ command = subprocess.Popen(args, stdout=subprocess.PIPE)
+ output, _ = command.communicate()
+ return output.splitlines()
+
+
+def _ExtractEditsFromStdout(build_directory, stdout):
+ """Extracts generated list of edits from the tool's stdout.
+
+ The expected format is documented at the top of this file.
+
+ Args:
+ build_directory: Directory that contains the compile database. Used to
+ normalize the filenames.
+ stdout: The stdout from running the clang tool.
+
+ Returns:
+ A dictionary mapping filenames to the associated edits.
+ """
+ lines = stdout.splitlines()
+ start_index = lines.index('==== BEGIN EDITS ====')
+ end_index = lines.index('==== END EDITS ====')
+ edits = collections.defaultdict(list)
+ for line in lines[start_index + 1:end_index]:
+ try:
+ edit_type, path, offset, length, replacement = line.split(':', 4)
+      replacement = replacement.replace("\0", "\n")
+ # Normalize the file path emitted by the clang tool to be relative to the
+ # current working directory.
+ path = os.path.relpath(os.path.join(build_directory, path))
+ edits[path].append(Edit(edit_type, int(offset), int(length), replacement))
+ except ValueError:
+ print 'Unable to parse edit: %s' % line
+ return edits
+
+
+def _ExecuteTool(toolname, build_directory, filename):
+ """Executes the tool.
+
+ This is defined outside the class so it can be pickled for the multiprocessing
+ module.
+
+ Args:
+ toolname: Path to the tool to execute.
+ build_directory: Directory that contains the compile database.
+ filename: The file to run the tool over.
+
+ Returns:
+ A dictionary that must contain the key "status" and a boolean value
+ associated with it.
+
+ If status is True, then the generated edits are stored with the key "edits"
+ in the dictionary.
+
+ Otherwise, the filename and the output from stderr are associated with the
+ keys "filename" and "stderr" respectively.
+ """
+ command = subprocess.Popen((toolname, '-p', build_directory, filename),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = command.communicate()
+ if command.returncode != 0:
+ return {'status': False, 'filename': filename, 'stderr': stderr}
+ else:
+ return {'status': True,
+ 'edits': _ExtractEditsFromStdout(build_directory, stdout)}
+
+
+class _CompilerDispatcher(object):
+ """Multiprocessing controller for running clang tools in parallel."""
+
+ def __init__(self, toolname, build_directory, filenames):
+ """Initializer method.
+
+ Args:
+ toolname: Path to the tool to execute.
+ build_directory: Directory that contains the compile database.
+ filenames: The files to run the tool over.
+ """
+ self.__toolname = toolname
+ self.__build_directory = build_directory
+ self.__filenames = filenames
+ self.__success_count = 0
+ self.__failed_count = 0
+ self.__edits = collections.defaultdict(list)
+
+ @property
+ def edits(self):
+ return self.__edits
+
+ @property
+ def failed_count(self):
+ return self.__failed_count
+
+ def Run(self):
+ """Does the grunt work."""
+ pool = multiprocessing.Pool()
+ result_iterator = pool.imap_unordered(
+ functools.partial(_ExecuteTool, self.__toolname,
+ self.__build_directory),
+ self.__filenames)
+ for result in result_iterator:
+ self.__ProcessResult(result)
+ sys.stdout.write('\n')
+ sys.stdout.flush()
+
+ def __ProcessResult(self, result):
+ """Handles result processing.
+
+ Args:
+ result: The result dictionary returned by _ExecuteTool.
+ """
+ if result['status']:
+ self.__success_count += 1
+ for k, v in result['edits'].iteritems():
+ self.__edits[k].extend(v)
+ else:
+ self.__failed_count += 1
+ sys.stdout.write('\nFailed to process %s\n' % result['filename'])
+ sys.stdout.write(result['stderr'])
+ sys.stdout.write('\n')
+ percentage = (
+ float(self.__success_count + self.__failed_count) /
+ len(self.__filenames)) * 100
+ sys.stdout.write('Succeeded: %d, Failed: %d [%.2f%%]\r' % (
+ self.__success_count, self.__failed_count, percentage))
+ sys.stdout.flush()
+
+
+def _ApplyEdits(edits, clang_format_diff_path):
+ """Apply the generated edits.
+
+ Args:
+ edits: A dict mapping filenames to Edit instances that apply to that file.
+    clang_format_diff_path: Path to the clang-format-diff.py script, used to
+      automatically reformat diffs to avoid style violations. Pass None if the
+      clang-format step should be skipped.
+ """
+ edit_count = 0
+ for k, v in edits.iteritems():
+ # Sort the edits and iterate through them in reverse order. Sorting allows
+ # duplicate edits to be quickly skipped, while reversing means that
+ # subsequent edits don't need to have their offsets updated with each edit
+ # applied.
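+    # (For example, with edits at offsets 10 and 100, applying the offset-100
+    # edit first leaves the position of the offset-10 edit unchanged.)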
+ v.sort()
+ last_edit = None
+ with open(k, 'rb+') as f:
+ contents = bytearray(f.read())
+ for edit in reversed(v):
+ if edit == last_edit:
+ continue
+ last_edit = edit
+ contents[edit.offset:edit.offset + edit.length] = edit.replacement
+ if not edit.replacement:
+ _ExtendDeletionIfElementIsInList(contents, edit.offset)
+ edit_count += 1
+ f.seek(0)
+ f.truncate()
+ f.write(contents)
+ if clang_format_diff_path:
+ # TODO(dcheng): python3.3 exposes this publicly as shlex.quote, but Chrome
+ # uses python2.7. Use the deprecated interface until Chrome uses a newer
+ # Python.
+ if subprocess.call('git diff -U0 %s | python %s -i -p1 -style=file ' % (
+ pipes.quote(k), clang_format_diff_path), shell=True) != 0:
+ print 'clang-format failed for %s' % k
+ print 'Applied %d edits to %d files' % (edit_count, len(edits))
+
+
+_WHITESPACE_BYTES = frozenset((ord('\t'), ord('\n'), ord('\r'), ord(' ')))
+
+
+def _ExtendDeletionIfElementIsInList(contents, offset):
+ """Extends the range of a deletion if the deleted element was part of a list.
+
+ This rewriter helper makes it easy for refactoring tools to remove elements
+ from a list. Even if a matcher callback knows that it is removing an element
+ from a list, it may not have enough information to accurately remove the list
+ element; for example, another matcher callback may end up removing an adjacent
+ list element, or all the list elements may end up being removed.
+
+ With this helper, refactoring tools can simply remove the list element and not
+ worry about having to include the comma in the replacement.
+
+ Args:
+ contents: A bytearray with the deletion already applied.
+ offset: The offset in the bytearray where the deleted range used to be.
+ """
+ char_before = char_after = None
+ left_trim_count = 0
+ for byte in reversed(contents[:offset]):
+ left_trim_count += 1
+ if byte in _WHITESPACE_BYTES:
+ continue
+ if byte in (ord(','), ord(':'), ord('('), ord('{')):
+ char_before = chr(byte)
+ break
+
+ right_trim_count = 0
+ for byte in contents[offset:]:
+ right_trim_count += 1
+ if byte in _WHITESPACE_BYTES:
+ continue
+ if byte == ord(','):
+ char_after = chr(byte)
+ break
+
+ if char_before:
+ if char_after:
+ del contents[offset:offset + right_trim_count]
+ elif char_before in (',', ':'):
+ del contents[offset - left_trim_count:offset]
+
+
+def main(argv):
+ if len(argv) < 2:
+ print 'Usage: run_tool.py <clang tool> <compile DB> <path 1> <path 2> ...'
+ print ' <clang tool> is the clang tool that should be run.'
+    print '  <compile DB> is the directory that contains the compile database.'
+    print '  <path 1> <path 2> ... can be used to filter what files are edited.'
+ return 1
+
+ clang_format_diff_path = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)),
+ '../../../third_party/llvm/tools/clang/tools/clang-format',
+ 'clang-format-diff.py')
+ # TODO(dcheng): Allow this to be controlled with a flag as well.
+ if not os.path.isfile(clang_format_diff_path):
+ clang_format_diff_path = None
+
+ filenames = frozenset(_GetFilesFromGit(argv[2:]))
+ # Filter out files that aren't C/C++/Obj-C/Obj-C++.
+ extensions = frozenset(('.c', '.cc', '.m', '.mm'))
+ dispatcher = _CompilerDispatcher(argv[0], argv[1],
+ [f for f in filenames
+ if os.path.splitext(f)[1] in extensions])
+ dispatcher.Run()
+ # Filter out edits to files that aren't in the git repository, since it's not
+ # useful to modify files that aren't under source control--typically, these
+ # are generated files or files in a git submodule that's not part of Chromium.
+ _ApplyEdits({k : v for k, v in dispatcher.edits.iteritems()
+ if k in filenames},
+ clang_format_diff_path)
+ if dispatcher.failed_count != 0:
+ return 2
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/clang/scripts/test_tool.py b/tools/clang/scripts/test_tool.py
new file mode 100755
index 0000000..d14dfda
--- /dev/null
+++ b/tools/clang/scripts/test_tool.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test harness for chromium clang tools."""
+
+import difflib
+import glob
+import json
+import os
+import os.path
+import subprocess
+import shutil
+import sys
+
+
+def _GenerateCompileCommands(files, include_paths):
+ """Returns a JSON string containing a compilation database for the input."""
+ include_path_flags = ' '.join('-I %s' % include_path
+ for include_path in include_paths)
+ return json.dumps([{'directory': '.',
+ 'command': 'clang++ -fsyntax-only %s -c %s' % (
+ include_path_flags, f),
+ 'file': f} for f in files], indent=2)
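+
+# For reference, each entry produced above has roughly this shape (the include
+# path and file name are placeholders):
+#   {"directory": ".",
+#    "command": "clang++ -fsyntax-only -I /abs/include/path -c foo-actual.cc",
+#    "file": "foo-actual.cc"}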
+
+
+def _NumberOfTestsToString(tests):
+ """Returns an English describing the number of tests."""
+ return "%d test%s" % (tests, 's' if tests != 1 else '')
+
+
+def main(argv):
+ if len(argv) < 1:
+ print 'Usage: test_tool.py <clang tool>'
+ print ' <clang tool> is the clang tool to be tested.'
+ sys.exit(1)
+
+ tool_to_test = argv[0]
+ tools_clang_scripts_directory = os.path.dirname(os.path.realpath(__file__))
+ tools_clang_directory = os.path.dirname(tools_clang_scripts_directory)
+ test_directory_for_tool = os.path.join(
+ tools_clang_directory, tool_to_test, 'tests')
+ compile_database = os.path.join(test_directory_for_tool,
+ 'compile_commands.json')
+ source_files = glob.glob(os.path.join(test_directory_for_tool,
+ '*-original.cc'))
+ actual_files = ['-'.join([source_file.rsplit('-', 1)[0], 'actual.cc'])
+ for source_file in source_files]
+ expected_files = ['-'.join([source_file.rsplit('-', 1)[0], 'expected.cc'])
+ for source_file in source_files]
+ include_paths = []
+ include_paths.append(
+ os.path.realpath(os.path.join(tools_clang_directory, '../..')))
+ # Many gtest headers expect to have testing/gtest/include in the include
+ # search path.
+ include_paths.append(
+ os.path.realpath(os.path.join(tools_clang_directory,
+ '../..',
+ 'testing/gtest/include')))
+
+ try:
+ # Set up the test environment.
+ for source, actual in zip(source_files, actual_files):
+ shutil.copyfile(source, actual)
+ # Stage the test files in the git index. If they aren't staged, then
+    # run_tool.py will skip them when applying replacements.
+ args = ['git', 'add']
+ args.extend(actual_files)
+ subprocess.check_call(args)
+ # Generate a temporary compilation database to run the tool over.
+ with open(compile_database, 'w') as f:
+ f.write(_GenerateCompileCommands(actual_files, include_paths))
+
+ args = ['python',
+ os.path.join(tools_clang_scripts_directory, 'run_tool.py'),
+ tool_to_test,
+ test_directory_for_tool]
+ args.extend(actual_files)
+ run_tool = subprocess.Popen(args, stdout=subprocess.PIPE)
+ stdout, _ = run_tool.communicate()
+ if run_tool.returncode != 0:
+ print 'run_tool failed:\n%s' % stdout
+ sys.exit(1)
+
+ passed = 0
+ failed = 0
+ for expected, actual in zip(expected_files, actual_files):
+ print '[ RUN ] %s' % os.path.relpath(actual)
+ expected_output = actual_output = None
+ with open(expected, 'r') as f:
+ expected_output = f.readlines()
+ with open(actual, 'r') as f:
+ actual_output = f.readlines()
+ if actual_output != expected_output:
+ failed += 1
+ for line in difflib.unified_diff(expected_output, actual_output,
+ fromfile=os.path.relpath(expected),
+ tofile=os.path.relpath(actual)):
+ sys.stdout.write(line)
+ print '[ FAILED ] %s' % os.path.relpath(actual)
+ # Don't clean up the file on failure, so the results can be referenced
+ # more easily.
+ continue
+ print '[ OK ] %s' % os.path.relpath(actual)
+ passed += 1
+ os.remove(actual)
+
+ if failed == 0:
+ os.remove(compile_database)
+
+ print '[==========] %s ran.' % _NumberOfTestsToString(len(source_files))
+ if passed > 0:
+ print '[ PASSED ] %s.' % _NumberOfTestsToString(passed)
+ if failed > 0:
+ print '[ FAILED ] %s.' % _NumberOfTestsToString(failed)
+ finally:
+ # No matter what, unstage the git changes we made earlier to avoid polluting
+ # the index.
+ args = ['git', 'reset', '--quiet', 'HEAD']
+ args.extend(actual_files)
+ subprocess.call(args)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/clang/scripts/update.py b/tools/clang/scripts/update.py
new file mode 100755
index 0000000..8ae4dea
--- /dev/null
+++ b/tools/clang/scripts/update.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Windows can't run .sh files, so this is a Python implementation of
+update.sh. This script should replace update.sh on all platforms eventually."""
+
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+# Do NOT CHANGE this if you don't know what you're doing -- see
+# https://code.google.com/p/chromium/wiki/UpdatingClang
+# Reverting problematic clang rolls is safe, though.
+# Note: this revision is only used for Windows. Other platforms use update.sh.
+LLVM_WIN_REVISION = 'HEAD'
+
+# ASan on Windows is useful enough to use even while clang/win is still in
+# bringup. Use a pinned revision to make it slightly more stable.
+if (re.search(r'\b(asan)=1', os.environ.get('GYP_DEFINES', '')) and
+ not 'LLVM_FORCE_HEAD_REVISION' in os.environ):
+ LLVM_WIN_REVISION = '217738'
+
+# Path constants. (All of these should be absolute paths.)
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+CHROMIUM_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..', '..'))
+LLVM_DIR = os.path.join(CHROMIUM_DIR, 'third_party', 'llvm')
+LLVM_BUILD_DIR = os.path.join(CHROMIUM_DIR, 'third_party', 'llvm-build',
+ 'Release+Asserts')
+COMPILER_RT_BUILD_DIR = os.path.join(LLVM_BUILD_DIR, '32bit-compiler-rt')
+CLANG_DIR = os.path.join(LLVM_DIR, 'tools', 'clang')
+COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'projects', 'compiler-rt')
+STAMP_FILE = os.path.join(LLVM_BUILD_DIR, 'cr_build_revision')
+
+LLVM_REPO_URL='https://llvm.org/svn/llvm-project'
+if 'LLVM_REPO_URL' in os.environ:
+ LLVM_REPO_URL = os.environ['LLVM_REPO_URL']
+
+
+def ReadStampFile():
+ """Return the contents of the stamp file, or '' if it doesn't exist."""
+ try:
+ with open(STAMP_FILE, 'r') as f:
+      return f.read()
+ except IOError:
+ return ''
+
+
+def WriteStampFile(s):
+ """Write s to the stamp file."""
+ if not os.path.exists(LLVM_BUILD_DIR):
+ os.makedirs(LLVM_BUILD_DIR)
+ with open(STAMP_FILE, 'w') as f:
+ f.write(s)
+
+
+def DeleteFiles(dir, pattern):
+ """Delete all files in dir matching pattern."""
+ n = 0
+ regex = re.compile(r'^' + pattern + r'$')
+ for root, _, files in os.walk(dir):
+ for f in files:
+ if regex.match(f):
+ os.remove(os.path.join(root, f))
+ n += 1
+ return n
+
+
+def ClobberChromiumBuildFiles():
+ """Clobber Chomium build files."""
+ print 'Clobbering Chromium build files...'
+ out_dir = os.path.join(CHROMIUM_DIR, 'out')
+ if os.path.isdir(out_dir):
+ shutil.rmtree(out_dir)
+ print 'Removed Chromium out dir: %s.' % (out_dir)
+
+
+def RunCommand(command, tries=1):
+ """Run a command, possibly with multiple retries."""
+ for i in range(0, tries):
+ print 'Running %s (try #%d)' % (str(command), i + 1)
+ if subprocess.call(command, shell=True) == 0:
+ return
+ print 'Failed.'
+ sys.exit(1)
+
+
+def CopyFile(src, dst):
+ """Copy a file from src to dst."""
+ shutil.copy(src, dst)
+ print "Copying %s to %s" % (src, dst)
+
+
+def CopyDirectoryContents(src, dst, filename_filter=None):
+ """Copy the files from directory src to dst
+ with an optional filename filter."""
+ if not os.path.exists(dst):
+ os.makedirs(dst)
+ for root, _, files in os.walk(src):
+ for f in files:
+ if filename_filter and not re.match(filename_filter, f):
+ continue
+ CopyFile(os.path.join(root, f), dst)
+
+
+def Checkout(name, url, dir):
+ """Checkout the SVN module at url into dir. Use name for the log message."""
+ print "Checking out %s r%s into '%s'" % (name, LLVM_WIN_REVISION, dir)
+ RunCommand(['svn', 'checkout', '--force',
+ url + '@' + LLVM_WIN_REVISION, dir], tries=2)
+
+
+def AddCMakeToPath():
+ """Look for CMake and add it to PATH if it's not there already."""
+ try:
+ # First check if cmake is already on PATH.
+ subprocess.call(['cmake', '--version'])
+ return
+ except OSError as e:
+ if e.errno != os.errno.ENOENT:
+ raise
+
+ cmake_locations = ['C:\\Program Files (x86)\\CMake\\bin',
+ 'C:\\Program Files (x86)\\CMake 2.8\\bin']
+ for d in cmake_locations:
+ if os.path.isdir(d):
+ os.environ['PATH'] = os.environ.get('PATH', '') + os.pathsep + d
+ return
+ print 'Failed to find CMake!'
+ sys.exit(1)
+
+
+vs_version = None
+def GetVSVersion():
+ global vs_version
+ if vs_version:
+ return vs_version
+
+ # Try using the toolchain in depot_tools.
+ # This sets environment variables used by SelectVisualStudioVersion below.
+ sys.path.append(os.path.join(CHROMIUM_DIR, 'build'))
+ import vs_toolchain
+ vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+
+ # Use gyp to find the MSVS installation, either in depot_tools as per above,
+ # or a system-wide installation otherwise.
+ sys.path.append(os.path.join(CHROMIUM_DIR, 'tools', 'gyp', 'pylib'))
+ import gyp.MSVSVersion
+ vs_version = gyp.MSVSVersion.SelectVisualStudioVersion('2013')
+ return vs_version
+
+
+def UpdateClang():
+ print 'Updating Clang to %s...' % (LLVM_WIN_REVISION)
+ if LLVM_WIN_REVISION != 'HEAD' and ReadStampFile() == LLVM_WIN_REVISION:
+ print 'Already up to date.'
+ return 0
+
+ AddCMakeToPath()
+ ClobberChromiumBuildFiles()
+
+ # Reset the stamp file in case the build is unsuccessful.
+ WriteStampFile('')
+
+ Checkout('LLVM', LLVM_REPO_URL + '/llvm/trunk', LLVM_DIR)
+ Checkout('Clang', LLVM_REPO_URL + '/cfe/trunk', CLANG_DIR)
+ Checkout('compiler-rt', LLVM_REPO_URL + '/compiler-rt/trunk', COMPILER_RT_DIR)
+
+ if not os.path.exists(LLVM_BUILD_DIR):
+ os.makedirs(LLVM_BUILD_DIR)
+ os.chdir(LLVM_BUILD_DIR)
+
+ RunCommand(GetVSVersion().SetupScript('x64') +
+ ['&&', 'cmake', '-GNinja', '-DCMAKE_BUILD_TYPE=Release',
+ '-DLLVM_ENABLE_ASSERTIONS=ON', LLVM_DIR])
+ RunCommand(GetVSVersion().SetupScript('x64') + ['&&', 'ninja', 'all'])
+
+ # Do an x86 build of compiler-rt to get the 32-bit ASan run-time.
+ # TODO(hans): Remove once the regular build above produces this.
+ if not os.path.exists(COMPILER_RT_BUILD_DIR):
+ os.makedirs(COMPILER_RT_BUILD_DIR)
+ os.chdir(COMPILER_RT_BUILD_DIR)
+ RunCommand(GetVSVersion().SetupScript('x86') +
+ ['&&', 'cmake', '-GNinja', '-DCMAKE_BUILD_TYPE=Release',
+ '-DLLVM_ENABLE_ASSERTIONS=ON', LLVM_DIR])
+ RunCommand(GetVSVersion().SetupScript('x86') + ['&&', 'ninja', 'compiler-rt'])
+
+ asan_rt_bin_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'bin')
+ asan_rt_bin_dst_dir = os.path.join(LLVM_BUILD_DIR, 'bin')
+ CopyDirectoryContents(asan_rt_bin_src_dir, asan_rt_bin_dst_dir,
+ r'^.*-i386\.dll$')
+
+ # TODO(hans): Make this (and the .gypi file) version number independent.
+ asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', 'clang',
+ '3.6.0', 'lib', 'windows')
+ asan_rt_lib_dst_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang',
+ '3.6.0', 'lib', 'windows')
+ CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir,
+ r'^.*-i386\.lib$')
+
+ CopyFile(os.path.join(asan_rt_lib_src_dir, '..', '..', 'asan_blacklist.txt'),
+ os.path.join(asan_rt_lib_dst_dir, '..', '..'))
+
+ # Make an extra copy of the sanitizer headers, to be put on the include path
+ # of the fallback compiler.
+ sanitizer_include_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang', '3.6.0',
+ 'include', 'sanitizer')
+ aux_sanitizer_include_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang',
+ '3.6.0', 'include_sanitizer',
+ 'sanitizer')
+ if not os.path.exists(aux_sanitizer_include_dir):
+ os.makedirs(aux_sanitizer_include_dir)
+ for _, _, files in os.walk(sanitizer_include_dir):
+ for f in files:
+ CopyFile(os.path.join(sanitizer_include_dir, f),
+ aux_sanitizer_include_dir)
+
+ WriteStampFile(LLVM_WIN_REVISION)
+ print 'Clang update was successful.'
+ return 0
+
+
+def main():
+ if not sys.platform in ['win32', 'cygwin']:
+ # For non-Windows, fall back to update.sh.
+ # TODO(hans): Make update.py replace update.sh completely.
+
+ # This script is called by gclient. gclient opens its hooks subprocesses
+ # with (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does
+ # custom output processing that breaks printing '\r' characters for
+ # single-line updating status messages as printed by curl and wget.
+ # Work around this by setting stderr of the update.sh process to stdin (!):
+ # gclient doesn't redirect stdin, and while stdin itself is read-only, a
+ # dup()ed sys.stdin is writable, try
+ # fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
+ # TODO: Fix gclient instead, http://crbug.com/95350
+ return subprocess.call(
+ [os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
+ stderr=os.fdopen(os.dup(sys.stdin.fileno())))
+
+ if not re.search(r'\b(clang|asan)=1', os.environ.get('GYP_DEFINES', '')):
+ print 'Skipping Clang update (clang=1 was not set in GYP_DEFINES).'
+ return 0
+
+ if re.search(r'\b(make_clang_dir)=', os.environ.get('GYP_DEFINES', '')):
+ print 'Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).'
+ return 0
+
+ return UpdateClang()
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/tools/clang/scripts/update.sh b/tools/clang/scripts/update.sh
new file mode 100755
index 0000000..eabed4b
--- /dev/null
+++ b/tools/clang/scripts/update.sh
@@ -0,0 +1,564 @@
+#!/usr/bin/env bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script will check out llvm and clang into third_party/llvm and build it.
+
+# Do NOT CHANGE this if you don't know what you're doing -- see
+# https://code.google.com/p/chromium/wiki/UpdatingClang
+# Reverting problematic clang rolls is safe, though.
+CLANG_REVISION=217949
+
+THIS_DIR="$(dirname "${0}")"
+LLVM_DIR="${THIS_DIR}/../../../third_party/llvm"
+LLVM_BUILD_DIR="${LLVM_DIR}/../llvm-build/Release+Asserts"
+COMPILER_RT_BUILD_DIR="${LLVM_DIR}/../llvm-build/compiler-rt"
+LLVM_BOOTSTRAP_DIR="${LLVM_DIR}/../llvm-bootstrap"
+LLVM_BOOTSTRAP_INSTALL_DIR="${LLVM_DIR}/../llvm-bootstrap-install"
+CLANG_DIR="${LLVM_DIR}/tools/clang"
+COMPILER_RT_DIR="${LLVM_DIR}/compiler-rt"
+LIBCXX_DIR="${LLVM_DIR}/projects/libcxx"
+LIBCXXABI_DIR="${LLVM_DIR}/projects/libcxxabi"
+ANDROID_NDK_DIR="${THIS_DIR}/../../../third_party/android_tools/ndk"
+STAMP_FILE="${LLVM_DIR}/../llvm-build/cr_build_revision"
+
+ABS_LIBCXX_DIR="${PWD}/${LIBCXX_DIR}"
+ABS_LIBCXXABI_DIR="${PWD}/${LIBCXXABI_DIR}"
+ABS_LLVM_DIR="${PWD}/${LLVM_DIR}"
+ABS_LLVM_BUILD_DIR="${PWD}/${LLVM_BUILD_DIR}"
+ABS_COMPILER_RT_DIR="${PWD}/${COMPILER_RT_DIR}"
+
+
+# Use both the clang revision and the plugin revisions to test for updates.
+BLINKGCPLUGIN_REVISION=\
+$(grep 'set(LIBRARYNAME' "$THIS_DIR"/../blink_gc_plugin/CMakeLists.txt \
+ | cut -d ' ' -f 2 | tr -cd '[0-9]')
+CLANG_AND_PLUGINS_REVISION="${CLANG_REVISION}-${BLINKGCPLUGIN_REVISION}"
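+# (Illustrative value: "217949-<N>", where <N> is whatever digit sequence the
+# plugin's LIBRARYNAME carries in its CMakeLists.txt.)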
+
+# ${A:-a} expands to $A if it is set, and to "a" otherwise.
+LLVM_REPO_URL=${LLVM_URL:-https://llvm.org/svn/llvm-project}
+
+if [[ -z "$GYP_DEFINES" ]]; then
+ GYP_DEFINES=
+fi
+if [[ -z "$GYP_GENERATORS" ]]; then
+ GYP_GENERATORS=
+fi
+
+
+# Die if any command dies, error on undefined variable expansions.
+set -eu
+
+OS="$(uname -s)"
+
+# Parse command line options.
+if_needed=
+force_local_build=
+run_tests=
+bootstrap=
+with_android=yes
+chrome_tools="plugins;blink_gc_plugin"
+gcc_toolchain=
+
+if [[ "${OS}" = "Darwin" ]]; then
+ with_android=
+fi
+
+while [[ $# > 0 ]]; do
+ case $1 in
+ --bootstrap)
+ bootstrap=yes
+ ;;
+ --if-needed)
+ if_needed=yes
+ ;;
+ --force-local-build)
+ force_local_build=yes
+ ;;
+ --print-revision)
+ echo $CLANG_REVISION
+ exit 0
+ ;;
+ --run-tests)
+ run_tests=yes
+ ;;
+ --without-android)
+ with_android=
+ ;;
+ --with-chrome-tools)
+ shift
+ if [[ $# == 0 ]]; then
+ echo "--with-chrome-tools requires an argument."
+ exit 1
+ fi
+ chrome_tools=$1
+ ;;
+ --gcc-toolchain)
+ shift
+ if [[ $# == 0 ]]; then
+ echo "--gcc-toolchain requires an argument."
+ exit 1
+ fi
+ if [[ -x "$1/bin/gcc" ]]; then
+ gcc_toolchain=$1
+ else
+ echo "Invalid --gcc-toolchain: '$1'."
+ echo "'$1/bin/gcc' does not appear to be valid."
+ exit 1
+ fi
+ ;;
+
+ --help)
+ echo "usage: $0 [--force-local-build] [--if-needed] [--run-tests] "
+ echo "--bootstrap: First build clang with CC, then with itself."
+ echo "--force-local-build: Don't try to download prebuilt binaries."
+ echo "--if-needed: Download clang only if the script thinks it is needed."
+ echo "--run-tests: Run tests after building. Only for local builds."
+ echo "--print-revision: Print current clang revision and exit."
+ echo "--without-android: Don't build ASan Android runtime library."
+ echo "--with-chrome-tools: Select which chrome tools to build." \
+ "Defaults to plugins;blink_gc_plugin."
+ echo " Example: --with-chrome-tools plugins;empty-string"
+ echo "--gcc-toolchain: Set the prefix for which GCC version should"
+ echo " be used for building. For example, to use gcc in"
+      echo "                 /opt/foo/bin/gcc, use '--gcc-toolchain /opt/foo'"
+ echo
+ exit 1
+ ;;
+ *)
+ echo "Unknown argument: '$1'."
+ echo "Use --help for help."
+ exit 1
+ ;;
+ esac
+ shift
+done
+
+if [[ -n "$if_needed" ]]; then
+ if [[ "${OS}" == "Darwin" ]]; then
+ # clang is used on Mac.
+ true
+ elif [[ "$GYP_DEFINES" =~ .*(clang|tsan|asan|lsan|msan)=1.* ]]; then
+ # clang requested via $GYP_DEFINES.
+ true
+ elif [[ -d "${LLVM_BUILD_DIR}" ]]; then
+ # clang previously downloaded, remove third_party/llvm-build to prevent
+ # updating.
+ true
+ elif [[ "${OS}" == "Linux" ]]; then
+ # Temporarily use clang on linux. Leave a stamp file behind, so that
+ # this script can remove clang again on machines where it was autoinstalled.
+ mkdir -p "${LLVM_BUILD_DIR}"
+ touch "${LLVM_BUILD_DIR}/autoinstall_stamp"
+ true
+ else
+ # clang wasn't needed, not doing anything.
+ exit 0
+ fi
+fi
+
+
+# Check if there's anything to be done, exit early if not.
+if [[ -f "${STAMP_FILE}" ]]; then
+  PREVIOUSLY_BUILT_REVISION=$(cat "${STAMP_FILE}")
+  if [[ -z "$force_local_build" ]] && \
+     [[ "${PREVIOUSLY_BUILT_REVISION}" = \
+        "${CLANG_AND_PLUGINS_REVISION}" ]]; then
+ echo "Clang already at ${CLANG_AND_PLUGINS_REVISION}"
+ exit 0
+ fi
+fi
+# Remove the stamp so an interrupted build always forces a new build next time.
+rm -f "${STAMP_FILE}"
+
+
+if [[ -z "$force_local_build" ]]; then
+ # Check if there's a prebuilt binary and if so just fetch that. That's faster,
+ # and goma relies on having matching binary hashes on client and server too.
+ CDS_URL=https://commondatastorage.googleapis.com/chromium-browser-clang
+ CDS_FILE="clang-${CLANG_REVISION}.tgz"
+ CDS_OUT_DIR=$(mktemp -d -t clang_download.XXXXXX)
+ CDS_OUTPUT="${CDS_OUT_DIR}/${CDS_FILE}"
+ if [ "${OS}" = "Linux" ]; then
+ CDS_FULL_URL="${CDS_URL}/Linux_x64/${CDS_FILE}"
+ elif [ "${OS}" = "Darwin" ]; then
+ CDS_FULL_URL="${CDS_URL}/Mac/${CDS_FILE}"
+ fi
+ echo Trying to download prebuilt clang
+ if which curl > /dev/null; then
+ curl -L --fail "${CDS_FULL_URL}" -o "${CDS_OUTPUT}" || \
+ rm -rf "${CDS_OUT_DIR}"
+ elif which wget > /dev/null; then
+ wget "${CDS_FULL_URL}" -O "${CDS_OUTPUT}" || rm -rf "${CDS_OUT_DIR}"
+ else
+ echo "Neither curl nor wget found. Please install one of these."
+ exit 1
+ fi
+ if [ -f "${CDS_OUTPUT}" ]; then
+ rm -rf "${LLVM_BUILD_DIR}"
+ mkdir -p "${LLVM_BUILD_DIR}"
+ tar -xzf "${CDS_OUTPUT}" -C "${LLVM_BUILD_DIR}"
+ echo clang "${CLANG_REVISION}" unpacked
+ echo "${CLANG_AND_PLUGINS_REVISION}" > "${STAMP_FILE}"
+ rm -rf "${CDS_OUT_DIR}"
+ exit 0
+ else
+ echo Did not find prebuilt clang at r"${CLANG_REVISION}", building
+ fi
+fi
+
+if [[ -n "${with_android}" ]] && ! [[ -d "${ANDROID_NDK_DIR}" ]]; then
+ echo "Android NDK not found at ${ANDROID_NDK_DIR}"
+ echo "The Android NDK is needed to build a Clang whose -fsanitize=address"
+ echo "works on Android. See "
+ echo "http://code.google.com/p/chromium/wiki/AndroidBuildInstructions for how"
+ echo "to install the NDK, or pass --without-android."
+ exit 1
+fi
+
+# Check that cmake and ninja are available.
+if ! which cmake > /dev/null; then
+ echo "CMake needed to build clang; please install"
+ exit 1
+fi
+if ! which ninja > /dev/null; then
+ echo "ninja needed to build clang, please install"
+ exit 1
+fi
+
+echo Reverting previously patched files
+for i in \
+ "${CLANG_DIR}/test/Index/crash-recovery-modules.m" \
+ "${CLANG_DIR}/unittests/libclang/LibclangTest.cpp" \
+ "${COMPILER_RT_DIR}/lib/asan/asan_rtl.cc" \
+ "${COMPILER_RT_DIR}/test/asan/TestCases/Linux/new_array_cookie_test.cc" \
+ ; do
+ if [[ -e "${i}" ]]; then
+ svn revert "${i}"
+ fi;
+done
+
+echo Getting LLVM r"${CLANG_REVISION}" in "${LLVM_DIR}"
+if ! svn co --force "${LLVM_REPO_URL}/llvm/trunk@${CLANG_REVISION}" \
+ "${LLVM_DIR}"; then
+ echo Checkout failed, retrying
+ rm -rf "${LLVM_DIR}"
+ svn co --force "${LLVM_REPO_URL}/llvm/trunk@${CLANG_REVISION}" "${LLVM_DIR}"
+fi
+
+echo Getting clang r"${CLANG_REVISION}" in "${CLANG_DIR}"
+svn co --force "${LLVM_REPO_URL}/cfe/trunk@${CLANG_REVISION}" "${CLANG_DIR}"
+
+# We have moved from building compiler-rt in the LLVM tree to building it in a
+# separate directory. Nuke any previous checkout to avoid building it.
+rm -rf "${LLVM_DIR}/projects/compiler-rt"
+
+echo Getting compiler-rt r"${CLANG_REVISION}" in "${COMPILER_RT_DIR}"
+svn co --force "${LLVM_REPO_URL}/compiler-rt/trunk@${CLANG_REVISION}" \
+ "${COMPILER_RT_DIR}"
+
+# clang needs a libc++ checkout, else -stdlib=libc++ won't find includes
+# (i.e. this is needed for bootstrap builds).
+if [ "${OS}" = "Darwin" ]; then
+ echo Getting libc++ r"${CLANG_REVISION}" in "${LIBCXX_DIR}"
+ svn co --force "${LLVM_REPO_URL}/libcxx/trunk@${CLANG_REVISION}" \
+ "${LIBCXX_DIR}"
+fi
+
+# While we're bundling our own libc++ on OS X, we need to compile libc++abi
+# into it too (since OS X 10.6 doesn't have libc++abi.dylib either).
+if [ "${OS}" = "Darwin" ]; then
+ echo Getting libc++abi r"${CLANG_REVISION}" in "${LIBCXXABI_DIR}"
+ svn co --force "${LLVM_REPO_URL}/libcxxabi/trunk@${CLANG_REVISION}" \
+ "${LIBCXXABI_DIR}"
+fi
+
+# Apply patch for tests failing with --disable-pthreads (llvm.org/PR11974)
+pushd "${CLANG_DIR}"
+cat << 'EOF' |
+--- third_party/llvm/tools/clang/test/Index/crash-recovery-modules.m (revision 202554)
++++ third_party/llvm/tools/clang/test/Index/crash-recovery-modules.m (working copy)
+@@ -12,6 +12,8 @@
+
+ // REQUIRES: crash-recovery
+ // REQUIRES: shell
++// XFAIL: *
++// (PR11974)
+
+ @import Crash;
+EOF
+patch -p4
+popd
+
+pushd "${CLANG_DIR}"
+cat << 'EOF' |
+--- unittests/libclang/LibclangTest.cpp (revision 215949)
++++ unittests/libclang/LibclangTest.cpp (working copy)
+@@ -431,7 +431,7 @@
+ EXPECT_EQ(0U, clang_getNumDiagnostics(ClangTU));
+ }
+
+-TEST_F(LibclangReparseTest, ReparseWithModule) {
++TEST_F(LibclangReparseTest, DISABLED_ReparseWithModule) {
+ const char *HeaderTop = "#ifndef H\n#define H\nstruct Foo { int bar;";
+ const char *HeaderBottom = "\n};\n#endif\n";
+ const char *MFile = "#include \"HeaderFile.h\"\nint main() {"
+EOF
+patch -p0
+popd
+
+# Echo all commands.
+set -x
+
+# Set default values for CC and CXX if they're not set in the environment.
+CC=${CC:-cc}
+CXX=${CXX:-c++}
+
+if [[ -n "${gcc_toolchain}" ]]; then
+ # Use the specified gcc installation for building.
+ CC="$gcc_toolchain/bin/gcc"
+ CXX="$gcc_toolchain/bin/g++"
+ # Set LD_LIBRARY_PATH to make auxiliary targets (tablegen, bootstrap compiler,
+ # etc.) find the .so.
+ export LD_LIBRARY_PATH="$(dirname $(${CXX} -print-file-name=libstdc++.so.6))"
+fi
+
+CFLAGS=""
+CXXFLAGS=""
+LDFLAGS=""
+
+# LLVM uses C++11 starting in llvm 3.5. On Linux, this means libstdc++4.7+ is
+# needed, on OS X it requires libc++. clang only automatically links to libc++
+# when targeting OS X 10.9+, so add -stdlib=libc++ explicitly so clang can run on
+# OS X versions as old as 10.7.
+# TODO(thakis): Some bots are still on 10.6, so for now bundle libc++.dylib.
+# Remove this once all bots are on 10.7+, then use --enable-libcpp=yes and
+# change deployment_target to 10.7.
+deployment_target=""
+
+if [ "${OS}" = "Darwin" ]; then
+ # When building on 10.9, /usr/include usually doesn't exist, and while
+ # Xcode's clang automatically sets a sysroot, self-built clangs don't.
+ CFLAGS="-isysroot $(xcrun --show-sdk-path)"
+ CPPFLAGS="${CFLAGS}"
+ CXXFLAGS="-stdlib=libc++ -nostdinc++ -I${ABS_LIBCXX_DIR}/include ${CFLAGS}"
+
+ if [[ -n "${bootstrap}" ]]; then
+ deployment_target=10.6
+ fi
+fi
+
+# Build bootstrap clang if requested.
+if [[ -n "${bootstrap}" ]]; then
+ ABS_INSTALL_DIR="${PWD}/${LLVM_BOOTSTRAP_INSTALL_DIR}"
+ echo "Building bootstrap compiler"
+ mkdir -p "${LLVM_BOOTSTRAP_DIR}"
+ pushd "${LLVM_BOOTSTRAP_DIR}"
+
+ cmake -GNinja \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_ENABLE_ASSERTIONS=ON \
+ -DLLVM_TARGETS_TO_BUILD=host \
+ -DLLVM_ENABLE_THREADS=OFF \
+ -DCMAKE_INSTALL_PREFIX="${ABS_INSTALL_DIR}" \
+ -DCMAKE_C_COMPILER="${CC}" \
+ -DCMAKE_CXX_COMPILER="${CXX}" \
+ -DCMAKE_C_FLAGS="${CFLAGS}" \
+ -DCMAKE_CXX_FLAGS="${CXXFLAGS}" \
+ ../llvm
+
+ ninja
+ if [[ -n "${run_tests}" ]]; then
+ ninja check-all
+ fi
+
+ ninja install
+ if [[ -n "${gcc_toolchain}" ]]; then
+    # Copy that gcc's libstdc++.so.6 to the build dir, so the bootstrap
+ # compiler can start.
+ cp -v "$(${CXX} -print-file-name=libstdc++.so.6)" \
+ "${ABS_INSTALL_DIR}/lib/"
+ fi
+
+ popd
+ CC="${ABS_INSTALL_DIR}/bin/clang"
+ CXX="${ABS_INSTALL_DIR}/bin/clang++"
+
+ if [[ -n "${gcc_toolchain}" ]]; then
+ # Tell the bootstrap compiler to use a specific gcc prefix to search
+    # for standard library headers and shared object files.
+ CFLAGS="--gcc-toolchain=${gcc_toolchain}"
+ CXXFLAGS="--gcc-toolchain=${gcc_toolchain}"
+ fi
+
+ echo "Building final compiler"
+fi
+
+# Build clang (in a separate directory).
+# The clang bots have this path hardcoded in build/scripts/slave/compile.py,
+# so if you change it here you also need to update it there.
+mkdir -p "${LLVM_BUILD_DIR}"
+pushd "${LLVM_BUILD_DIR}"
+
+# Build libc++.dylib while some bots are still on OS X 10.6.
+if [ "${OS}" = "Darwin" ]; then
+ rm -rf libcxxbuild
+ LIBCXXFLAGS="-O3 -std=c++11 -fstrict-aliasing"
+
+ # libcxx and libcxxabi both have a file stdexcept.cpp, so put their .o files
+ # into different subdirectories.
+ mkdir -p libcxxbuild/libcxx
+ pushd libcxxbuild/libcxx
+ ${CXX:-c++} -c ${CXXFLAGS} ${LIBCXXFLAGS} "${ABS_LIBCXX_DIR}"/src/*.cpp
+ popd
+
+ mkdir -p libcxxbuild/libcxxabi
+ pushd libcxxbuild/libcxxabi
+ ${CXX:-c++} -c ${CXXFLAGS} ${LIBCXXFLAGS} "${ABS_LIBCXXABI_DIR}"/src/*.cpp -I"${ABS_LIBCXXABI_DIR}/include"
+ popd
+
+ pushd libcxxbuild
+ ${CC:-cc} libcxx/*.o libcxxabi/*.o -o libc++.1.dylib -dynamiclib \
+ -nodefaultlibs -current_version 1 -compatibility_version 1 \
+ -lSystem -install_name @executable_path/libc++.dylib \
+ -Wl,-unexported_symbols_list,${ABS_LIBCXX_DIR}/lib/libc++unexp.exp \
+ -Wl,-force_symbols_not_weak_list,${ABS_LIBCXX_DIR}/lib/notweak.exp \
+ -Wl,-force_symbols_weak_list,${ABS_LIBCXX_DIR}/lib/weak.exp
+ ln -sf libc++.1.dylib libc++.dylib
+ popd
+ LDFLAGS+="-stdlib=libc++ -L${PWD}/libcxxbuild"
+fi
+
+rm -fv CMakeCache.txt
+MACOSX_DEPLOYMENT_TARGET=${deployment_target} cmake -GNinja \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_ENABLE_ASSERTIONS=ON \
+ -DLLVM_ENABLE_THREADS=OFF \
+ -DCMAKE_C_COMPILER="${CC}" \
+ -DCMAKE_CXX_COMPILER="${CXX}" \
+ -DCMAKE_C_FLAGS="${CFLAGS}" \
+ -DCMAKE_CXX_FLAGS="${CXXFLAGS}" \
+ -DCMAKE_EXE_LINKER_FLAGS="${LDFLAGS}" \
+ -DCMAKE_SHARED_LINKER_FLAGS="${LDFLAGS}" \
+ -DCMAKE_MODULE_LINKER_FLAGS="${LDFLAGS}" \
+ "${ABS_LLVM_DIR}"
+env
+
+if [[ -n "${gcc_toolchain}" ]]; then
+  # Copy in the right libstdc++.so.6 so clang can start.
+ mkdir -p lib
+ cp -v "$(${CXX} ${CXXFLAGS} -print-file-name=libstdc++.so.6)" lib/
+fi
+
+ninja
+
+STRIP_FLAGS=
+if [ "${OS}" = "Darwin" ]; then
+ # See http://crbug.com/256342
+ STRIP_FLAGS=-x
+
+ cp libcxxbuild/libc++.1.dylib bin/
+fi
+strip ${STRIP_FLAGS} bin/clang
+popd
+
+# Build compiler-rt out-of-tree.
+mkdir -p "${COMPILER_RT_BUILD_DIR}"
+pushd "${COMPILER_RT_BUILD_DIR}"
+
+rm -fv CMakeCache.txt
+MACOSX_DEPLOYMENT_TARGET=${deployment_target} cmake -GNinja \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_ENABLE_ASSERTIONS=ON \
+ -DLLVM_ENABLE_THREADS=OFF \
+ -DCMAKE_C_COMPILER="${CC}" \
+ -DCMAKE_CXX_COMPILER="${CXX}" \
+ -DLLVM_CONFIG_PATH="${ABS_LLVM_BUILD_DIR}/bin/llvm-config" \
+ "${ABS_COMPILER_RT_DIR}"
+
+ninja
+
+# Copy selected output to the main tree.
+# Darwin doesn't support cp --parents, so pipe through tar instead.
+CLANG_VERSION=$("${ABS_LLVM_BUILD_DIR}/bin/clang" --version | \
+ sed -ne 's/clang version \([0-9]\.[0-9]\.[0-9]\).*/\1/p')
+ABS_LLVM_CLANG_LIB_DIR="${ABS_LLVM_BUILD_DIR}/lib/clang/${CLANG_VERSION}"
+tar -c *blacklist.txt | tar -C ${ABS_LLVM_CLANG_LIB_DIR} -xv
+tar -c include/sanitizer | tar -C ${ABS_LLVM_CLANG_LIB_DIR} -xv
+if [[ "${OS}" = "Darwin" ]]; then
+ tar -c lib/darwin | tar -C ${ABS_LLVM_CLANG_LIB_DIR} -xv
+else
+ tar -c lib/linux | tar -C ${ABS_LLVM_CLANG_LIB_DIR} -xv
+fi
+
+popd
+
+if [[ -n "${with_android}" ]]; then
+ # Make a standalone Android toolchain.
+ ${ANDROID_NDK_DIR}/build/tools/make-standalone-toolchain.sh \
+ --platform=android-14 \
+ --install-dir="${LLVM_BUILD_DIR}/android-toolchain" \
+ --system=linux-x86_64 \
+ --stl=stlport
+
+ # Android NDK r9d copies a broken unwind.h into the toolchain, see
+ # http://crbug.com/357890
+ rm -v "${LLVM_BUILD_DIR}"/android-toolchain/include/c++/*/unwind.h
+
+ # Build ASan runtime for Android in a separate build tree.
+ mkdir -p ${LLVM_BUILD_DIR}/android
+ pushd ${LLVM_BUILD_DIR}/android
+ rm -fv CMakeCache.txt
+ MACOSX_DEPLOYMENT_TARGET=${deployment_target} cmake -GNinja \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_ENABLE_ASSERTIONS=ON \
+ -DLLVM_ENABLE_THREADS=OFF \
+ -DCMAKE_C_COMPILER=${PWD}/../bin/clang \
+ -DCMAKE_CXX_COMPILER=${PWD}/../bin/clang++ \
+ -DLLVM_CONFIG_PATH=${PWD}/../bin/llvm-config \
+ -DCMAKE_C_FLAGS="--target=arm-linux-androideabi --sysroot=${PWD}/../android-toolchain/sysroot -B${PWD}/../android-toolchain" \
+ -DCMAKE_CXX_FLAGS="--target=arm-linux-androideabi --sysroot=${PWD}/../android-toolchain/sysroot -B${PWD}/../android-toolchain" \
+ -DANDROID=1 \
+ "${ABS_COMPILER_RT_DIR}"
+ ninja clang_rt.asan-arm-android
+
+ # And copy it into the main build tree.
+ cp "$(find -name libclang_rt.asan-arm-android.so)" "${ABS_LLVM_CLANG_LIB_DIR}/lib/linux/"
+ popd
+fi
+
+# Build Chrome-specific clang tools. Paths in this list should be relative to
+# tools/clang.
+TOOL_SRC_DIR="${PWD}/${THIS_DIR}/../"
+TOOL_BUILD_DIR="${ABS_LLVM_BUILD_DIR}/tools/clang/tools/chrome-extras"
+
+rm -rf "${TOOL_BUILD_DIR}"
+mkdir -p "${TOOL_BUILD_DIR}"
+pushd "${TOOL_BUILD_DIR}"
+rm -fv CMakeCache.txt
+MACOSX_DEPLOYMENT_TARGET=${deployment_target} cmake -GNinja \
+ -DLLVM_BUILD_DIR="${ABS_LLVM_BUILD_DIR}" \
+ -DLLVM_SRC_DIR="${ABS_LLVM_DIR}" \
+ -DCMAKE_C_COMPILER="${CC}" \
+ -DCMAKE_CXX_COMPILER="${CXX}" \
+ -DCMAKE_C_FLAGS="${CFLAGS}" \
+ -DCMAKE_CXX_FLAGS="${CXXFLAGS}" \
+ -DCMAKE_EXE_LINKER_FLAGS="${LDFLAGS}" \
+ -DCMAKE_SHARED_LINKER_FLAGS="${LDFLAGS}" \
+ -DCMAKE_MODULE_LINKER_FLAGS="${LDFLAGS}" \
+ -DCMAKE_INSTALL_PREFIX="${ABS_LLVM_BUILD_DIR}" \
+ -DCHROMIUM_TOOLS="${chrome_tools}" \
+ "${TOOL_SRC_DIR}"
+popd
+ninja -C "${TOOL_BUILD_DIR}" install
+
+if [[ -n "$run_tests" ]]; then
+ # Run Chrome tool tests.
+ ninja -C "${TOOL_BUILD_DIR}" check-all
+ # Run the LLVM and Clang tests.
+ ninja -C "${LLVM_BUILD_DIR}" check-all
+fi
+
+# After everything is done, log success for this revision.
+echo "${CLANG_AND_PLUGINS_REVISION}" > "${STAMP_FILE}"
diff --git a/tools/gdb/gdb_chrome.py b/tools/gdb/gdb_chrome.py
new file mode 100644
index 0000000..2d2bfa6
--- /dev/null
+++ b/tools/gdb/gdb_chrome.py
@@ -0,0 +1,334 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""GDB support for Chrome types.
+
+Add this to your gdb by amending your ~/.gdbinit as follows:
+ python
+ import sys
+ sys.path.insert(0, "/path/to/tools/gdb/")
+ import gdb_chrome
+ end
+
+Use
+ (gdb) p /r any_variable
+to print |any_variable| without using any printers.
+"""
+
+import datetime
+import gdb
+import gdb.printing
+import os
+import sys
+
+sys.path.insert(0, os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ '..', '..', 'third_party', 'WebKit', 'Tools', 'gdb'))
+try:
+ import webkit
+finally:
+ sys.path.pop(0)
+
+# When debugging this module, set the below variable to True, and then use
+# (gdb) python del sys.modules['gdb_chrome']
+# (gdb) python import gdb_chrome
+# to reload.
+_DEBUGGING = False
+
+
+pp_set = gdb.printing.RegexpCollectionPrettyPrinter("chromium")
+
+
+def typed_ptr(ptr):
+ """Prints a pointer along with its exact type.
+
+ By default, gdb would print just the address, which takes more
+ steps to interpret.
+ """
+ # Returning this as a cast expression surrounded by parentheses
+ # makes it easier to cut+paste inside of gdb.
+ return '((%s)%s)' % (ptr.dynamic_type, ptr)
+
+
+def yield_fields(val):
+ """Use this in a printer's children() method to print an object's fields.
+
+ e.g.
+ def children():
+ for result in yield_fields(self.val):
+ yield result
+ """
+  try:
+    fields = val.type.target().fields()
+  except:
+    # Not a pointer/reference type; fall back to the type's own fields.
+    fields = val.type.fields()
+ for field in fields:
+ if field.is_base_class:
+ yield (field.name, val.cast(gdb.lookup_type(field.name)))
+ else:
+ yield (field.name, val[field.name])
+
+
+class Printer(object):
+ def __init__(self, val):
+ self.val = val
+
+
+class StringPrinter(Printer):
+ def display_hint(self):
+ return 'string'
+
+
+class String16Printer(StringPrinter):
+ def to_string(self):
+ return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
+pp_set.add_printer(
+    'string16',
+    '^string16|std::basic_string<(unsigned short|base::char16).*>$',
+    String16Printer)
+
+
+class GURLPrinter(StringPrinter):
+ def to_string(self):
+ return self.val['spec_']
+pp_set.add_printer('GURL', '^GURL$', GURLPrinter)
+
+
+class FilePathPrinter(StringPrinter):
+ def to_string(self):
+ return self.val['path_']['_M_dataplus']['_M_p']
+pp_set.add_printer('FilePath', '^FilePath$', FilePathPrinter)
+
+
+class SizePrinter(Printer):
+ def to_string(self):
+ return '%sx%s' % (self.val['width_'], self.val['height_'])
+pp_set.add_printer('gfx::Size', '^gfx::(Size|SizeF|SizeBase<.*>)$', SizePrinter)
+
+
+class PointPrinter(Printer):
+ def to_string(self):
+ return '%s,%s' % (self.val['x_'], self.val['y_'])
+pp_set.add_printer('gfx::Point', '^gfx::(Point|PointF|PointBase<.*>)$',
+ PointPrinter)
+
+
+class RectPrinter(Printer):
+ def to_string(self):
+ return '%s %s' % (self.val['origin_'], self.val['size_'])
+pp_set.add_printer('gfx::Rect', '^gfx::(Rect|RectF|RectBase<.*>)$',
+ RectPrinter)
+
+
+class SmartPtrPrinter(Printer):
+ def to_string(self):
+ return '%s%s' % (self.typename, typed_ptr(self.ptr()))
+
+
+class ScopedRefPtrPrinter(SmartPtrPrinter):
+ typename = 'scoped_refptr'
+ def ptr(self):
+ return self.val['ptr_']
+pp_set.add_printer('scoped_refptr', '^scoped_refptr<.*>$', ScopedRefPtrPrinter)
+
+
+class LinkedPtrPrinter(SmartPtrPrinter):
+ typename = 'linked_ptr'
+ def ptr(self):
+ return self.val['value_']
+pp_set.add_printer('linked_ptr', '^linked_ptr<.*>$', LinkedPtrPrinter)
+
+
+class WeakPtrPrinter(SmartPtrPrinter):
+ typename = 'base::WeakPtr'
+ def ptr(self):
+ flag = ScopedRefPtrPrinter(self.val['ref_']['flag_']).ptr()
+ if flag and flag['is_valid_']:
+ return self.val['ptr_']
+ return gdb.Value(0).cast(self.val['ptr_'].type)
+pp_set.add_printer('base::WeakPtr', '^base::WeakPtr<.*>$', WeakPtrPrinter)
+
+
+class CallbackPrinter(Printer):
+ """Callbacks provide no usable information so reduce the space they take."""
+ def to_string(self):
+ return '...'
+pp_set.add_printer('base::Callback', '^base::Callback<.*>$', CallbackPrinter)
+
+
+class LocationPrinter(Printer):
+ def to_string(self):
+ return '%s()@%s:%s' % (self.val['function_name_'].string(),
+ self.val['file_name_'].string(),
+ self.val['line_number_'])
+pp_set.add_printer('tracked_objects::Location', '^tracked_objects::Location$',
+ LocationPrinter)
+
+
+class PendingTaskPrinter(Printer):
+ def to_string(self):
+ return 'From %s' % (self.val['posted_from'],)
+
+ def children(self):
+ for result in yield_fields(self.val):
+ if result[0] not in ('task', 'posted_from'):
+ yield result
+pp_set.add_printer('base::PendingTask', '^base::PendingTask$',
+ PendingTaskPrinter)
+
+
+class LockPrinter(Printer):
+ def to_string(self):
+ try:
+ if self.val['owned_by_thread_']:
+ return 'Locked by thread %s' % self.val['owning_thread_id_']
+ else:
+ return 'Unlocked'
+ except gdb.error:
+ return 'Unknown state'
+pp_set.add_printer('base::Lock', '^base::Lock$', LockPrinter)
+
+
+class TimeDeltaPrinter(object):
+ def __init__(self, val):
+ self._timedelta = datetime.timedelta(microseconds=int(val['delta_']))
+
+ def timedelta(self):
+ return self._timedelta
+
+ def to_string(self):
+ return str(self._timedelta)
+pp_set.add_printer('base::TimeDelta', '^base::TimeDelta$', TimeDeltaPrinter)
+
+
+class TimeTicksPrinter(TimeDeltaPrinter):
+ def __init__(self, val):
+ self._timedelta = datetime.timedelta(microseconds=int(val['ticks_']))
+pp_set.add_printer('base::TimeTicks', '^base::TimeTicks$', TimeTicksPrinter)
+
+
+class TimePrinter(object):
+ def __init__(self, val):
+ timet_offset = gdb.parse_and_eval(
+ 'base::Time::kTimeTToMicrosecondsOffset')
+ self._datetime = (datetime.datetime.fromtimestamp(0) +
+ datetime.timedelta(microseconds=
+ int(val['us_'] - timet_offset)))
+
+ def datetime(self):
+ return self._datetime
+
+ def to_string(self):
+ return str(self._datetime)
+pp_set.add_printer('base::Time', '^base::Time$', TimePrinter)
+
+
+class IpcMessagePrinter(Printer):
+ def header(self):
+ return self.val['header_'].cast(
+ gdb.lookup_type('IPC::Message::Header').pointer())
+
+ def to_string(self):
+ message_type = self.header()['type']
+ return '%s of kind %s line %s' % (
+ self.val.dynamic_type,
+ (message_type >> 16).cast(gdb.lookup_type('IPCMessageStart')),
+ message_type & 0xffff)
+
+ def children(self):
+ yield ('header_', self.header().dereference())
+ yield ('capacity_after_header_', self.val['capacity_after_header_'])
+ for field in self.val.type.fields():
+ if field.is_base_class:
+ continue
+ yield (field.name, self.val[field.name])
+pp_set.add_printer('IPC::Message', '^IPC::Message$', IpcMessagePrinter)
+
+
+class NotificationRegistrarPrinter(Printer):
+ def to_string(self):
+ try:
+ registrations = self.val['registered_']
+ vector_finish = registrations['_M_impl']['_M_finish']
+ vector_start = registrations['_M_impl']['_M_start']
+ if vector_start == vector_finish:
+ return 'Not watching notifications'
+ if vector_start.dereference().type.sizeof == 0:
+ # Incomplete type: b/8242773
+ return 'Watching some notifications'
+ return ('Watching %s notifications; '
+ 'print %s->registered_ for details') % (
+ int(vector_finish - vector_start),
+ typed_ptr(self.val.address))
+ except gdb.error:
+ return 'NotificationRegistrar'
+pp_set.add_printer('content::NotificationRegistrar',
+ '^content::NotificationRegistrar$',
+ NotificationRegistrarPrinter)
+
+
+class SiteInstanceImplPrinter(object):
+ def __init__(self, val):
+ self.val = val.cast(val.dynamic_type)
+
+ def to_string(self):
+ return 'SiteInstanceImpl@%s for %s' % (
+ self.val.address, self.val['site_'])
+
+ def children(self):
+ yield ('id_', self.val['id_'])
+ yield ('has_site_', self.val['has_site_'])
+ if self.val['browsing_instance_']['ptr_']:
+ yield ('browsing_instance_', self.val['browsing_instance_']['ptr_'])
+ if self.val['process_']:
+ yield ('process_', typed_ptr(self.val['process_']))
+ if self.val['render_process_host_factory_']:
+ yield ('render_process_host_factory_',
+ self.val['render_process_host_factory_'])
+pp_set.add_printer('content::SiteInstanceImpl', '^content::SiteInstanceImpl$',
+ SiteInstanceImplPrinter)
+
+
+class RenderProcessHostImplPrinter(object):
+ def __init__(self, val):
+ self.val = val.cast(val.dynamic_type)
+
+ def to_string(self):
+ pid = ''
+ try:
+ child_process_launcher_ptr = (
+ self.val['child_process_launcher_']['impl_']['data_']['ptr'])
+ if child_process_launcher_ptr:
+ context = (child_process_launcher_ptr['context_']['ptr_'])
+ if context:
+ pid = ' PID %s' % str(context['process_']['process_'])
+ except gdb.error:
+ # The definition of the Context type may not be available.
+ # b/8242773
+ pass
+ return 'RenderProcessHostImpl@%s%s' % (self.val.address, pid)
+
+ def children(self):
+ yield ('id_', self.val['id_'])
+ yield ('listeners_',
+ self.val['listeners_']['data_'])
+ yield ('worker_ref_count_', self.val['worker_ref_count_'])
+ yield ('fast_shutdown_started_', self.val['fast_shutdown_started_'])
+ yield ('deleting_soon_', self.val['deleting_soon_'])
+ yield ('pending_views_', self.val['pending_views_'])
+ yield ('visible_widgets_', self.val['visible_widgets_'])
+ yield ('backgrounded_', self.val['backgrounded_'])
+ yield ('widget_helper_', self.val['widget_helper_'])
+ yield ('is_initialized_', self.val['is_initialized_'])
+ yield ('browser_context_', typed_ptr(self.val['browser_context_']))
+ yield ('sudden_termination_allowed_',
+ self.val['sudden_termination_allowed_'])
+ yield ('ignore_input_events_', self.val['ignore_input_events_'])
+ yield ('is_guest_', self.val['is_guest_'])
+pp_set.add_printer('content::RenderProcessHostImpl',
+ '^content::RenderProcessHostImpl$',
+ RenderProcessHostImplPrinter)
+
+
+gdb.printing.register_pretty_printer(gdb, pp_set, replace=_DEBUGGING)
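All of the printers above follow the same recipe: subclass Printer (or supply
__init__/to_string yourself), optionally add children() and display_hint(),
and register the class against a type-name regexp on the shared pp_set. As a
sketch of how another printer would slot in alongside the ones above
(gfx::Vector2d and its x_/y_ members are assumed purely for illustration; this
file does not register such a printer):

    class Vector2dPrinter(Printer):
        """Hypothetical printer, shown only to illustrate the pattern."""
        def to_string(self):
            # Assumes the type stores its coordinates in x_/y_ members.
            return '%s,%s' % (self.val['x_'], self.val['y_'])

    pp_set.add_printer('gfx::Vector2d', '^gfx::Vector2d$', Vector2dPrinter)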
diff --git a/tools/generate_library_loader/generate_library_loader.gni b/tools/generate_library_loader/generate_library_loader.gni
new file mode 100644
index 0000000..debeddd
--- /dev/null
+++ b/tools/generate_library_loader/generate_library_loader.gni
@@ -0,0 +1,54 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This template makes a stub (loader) for a Linux system library, so that the
+# library can be loaded dynamically (via dlopen) at runtime.
+
+# name: Name to use for the value of the --name arg.
+# output_h/output_cc: Names for the generated header/cc file with no dir.
+# header: header file to process. Example: "<foo/bar.h>"
+# functions: List of strings for functions to process.
+# config: (optional) Label of the config generated by pkgconfig.
+# bundled_header: (optional) Header to #include instead of |header| when the
+#   dlopen path is used; passed to the script as --bundled-header.
+template("generate_library_loader") {
+ output_h = "$root_gen_dir/library_loaders/" + invoker.output_h
+ output_cc = "$root_gen_dir/library_loaders/" + invoker.output_cc
+
+ action_visibility = [ ":$target_name" ]
+ action("${target_name}_loader") {
+ visibility = action_visibility
+
+ script = "//tools/generate_library_loader/generate_library_loader.py"
+ if (defined(invoker.visibility)) {
+ visibility = invoker.visibility
+ }
+
+ outputs = [ output_h, output_cc ]
+
+ args = [
+ "--name", invoker.name,
+ "--output-h", rebase_path(output_h),
+ "--output-cc", rebase_path(output_cc),
+ "--header", invoker.header,
+    # Note the GYP build exposes a per-target variable to control this, which,
+    # if manually set to true, will disable dlopen(). It's not clear this is
+    # needed, so here we just leave it off. If this can be done globally, we
+    # can expose one switch for this value; otherwise we need to add a
+    # template param for this.
+ "--link-directly=0",
+ ]
+ if (defined(invoker.bundled_header)) {
+ args += [ "--bundled-header", invoker.bundled_header ]
+ }
+ args += invoker.functions
+ }
+
+ source_set(target_name) {
+ if (defined(invoker.config)) {
+ public_configs = [ invoker.config ]
+ }
+ sources = [ output_h, output_cc ]
+ deps = [ ":${target_name}_loader" ]
+ }
+}
diff --git a/tools/generate_library_loader/generate_library_loader.py b/tools/generate_library_loader/generate_library_loader.py
new file mode 100755
index 0000000..ebf0ab1
--- /dev/null
+++ b/tools/generate_library_loader/generate_library_loader.py
@@ -0,0 +1,249 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Creates a library loader (a header and implementation file),
+which is a wrapper for dlopen or direct linking with given library.
+
+The loader makes it possible to have the same client code for both cases,
+and also makes it easier to write code using dlopen (and also provides
+a standard way to do so, and limits the ugliness just to generated files).
+
+For more info refer to http://crbug.com/162733 .
+"""
+
+
+import optparse
+import os.path
+import re
+import sys
+
+
+HEADER_TEMPLATE = """// This is generated file. Do not modify directly.
+// Path to the code generator: %(generator_path)s .
+
+#ifndef %(unique_prefix)s
+#define %(unique_prefix)s
+
+%(wrapped_header_include)s
+
+#include <string>
+
+class %(class_name)s {
+ public:
+ %(class_name)s();
+ ~%(class_name)s();
+
+ bool Load(const std::string& library_name)
+ __attribute__((warn_unused_result));
+
+ bool loaded() const { return loaded_; }
+
+%(member_decls)s
+
+ private:
+ void CleanUp(bool unload);
+
+#if defined(%(unique_prefix)s_DLOPEN)
+ void* library_;
+#endif
+
+ bool loaded_;
+
+ // Disallow copy constructor and assignment operator.
+ %(class_name)s(const %(class_name)s&);
+ void operator=(const %(class_name)s&);
+};
+
+#endif // %(unique_prefix)s
+"""
+
+
+HEADER_MEMBER_TEMPLATE = """ typeof(&::%(function_name)s) %(function_name)s;
+"""
+
+
+IMPL_TEMPLATE = """// This is generated file. Do not modify directly.
+// Path to the code generator: %(generator_path)s .
+
+#include "%(generated_header_name)s"
+
+#include <dlfcn.h>
+
+// Put these sanity checks here so that they fire at most once
+// (to avoid cluttering the build output).
+#if !defined(%(unique_prefix)s_DLOPEN) && !defined(%(unique_prefix)s_DT_NEEDED)
+#error neither %(unique_prefix)s_DLOPEN nor %(unique_prefix)s_DT_NEEDED defined
+#endif
+#if defined(%(unique_prefix)s_DLOPEN) && defined(%(unique_prefix)s_DT_NEEDED)
+#error both %(unique_prefix)s_DLOPEN and %(unique_prefix)s_DT_NEEDED defined
+#endif
+
+%(class_name)s::%(class_name)s() : loaded_(false) {
+}
+
+%(class_name)s::~%(class_name)s() {
+ CleanUp(loaded_);
+}
+
+bool %(class_name)s::Load(const std::string& library_name) {
+ if (loaded_)
+ return false;
+
+#if defined(%(unique_prefix)s_DLOPEN)
+ library_ = dlopen(library_name.c_str(), RTLD_LAZY);
+ if (!library_)
+ return false;
+#endif
+
+%(member_init)s
+
+ loaded_ = true;
+ return true;
+}
+
+void %(class_name)s::CleanUp(bool unload) {
+#if defined(%(unique_prefix)s_DLOPEN)
+ if (unload) {
+ dlclose(library_);
+ library_ = NULL;
+ }
+#endif
+ loaded_ = false;
+%(member_cleanup)s
+}
+"""
+
+IMPL_MEMBER_INIT_TEMPLATE = """
+#if defined(%(unique_prefix)s_DLOPEN)
+ %(function_name)s =
+ reinterpret_cast<typeof(this->%(function_name)s)>(
+ dlsym(library_, "%(function_name)s"));
+#endif
+#if defined(%(unique_prefix)s_DT_NEEDED)
+ %(function_name)s = &::%(function_name)s;
+#endif
+ if (!%(function_name)s) {
+ CleanUp(true);
+ return false;
+ }
+"""
+
+IMPL_MEMBER_CLEANUP_TEMPLATE = """ %(function_name)s = NULL;
+"""
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('--name')
+ parser.add_option('--output-cc')
+ parser.add_option('--output-h')
+ parser.add_option('--header')
+
+ parser.add_option('--bundled-header')
+ parser.add_option('--use-extern-c', action='store_true', default=False)
+ parser.add_option('--link-directly', type=int, default=0)
+
+ options, args = parser.parse_args()
+
+ if not options.name:
+ parser.error('Missing --name parameter')
+ if not options.output_cc:
+ parser.error('Missing --output-cc parameter')
+ if not options.output_h:
+ parser.error('Missing --output-h parameter')
+ if not options.header:
+    parser.error('Missing --header parameter')
+ if not args:
+ parser.error('No function names specified')
+
+ # Make sure we are always dealing with paths relative to source tree root
+ # to avoid issues caused by different relative path roots.
+ source_tree_root = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..'))
+ options.output_cc = os.path.relpath(options.output_cc, source_tree_root)
+ options.output_h = os.path.relpath(options.output_h, source_tree_root)
+
+ # Create a unique prefix, e.g. for header guards.
+ # Stick a known string at the beginning to ensure this doesn't begin
+ # with an underscore, which is reserved for the C++ implementation.
+ unique_prefix = ('LIBRARY_LOADER_' +
+ re.sub(r'[\W]', '_', options.output_h).upper())
+
+ member_decls = []
+ member_init = []
+ member_cleanup = []
+ for fn in args:
+ member_decls.append(HEADER_MEMBER_TEMPLATE % {
+ 'function_name': fn,
+ 'unique_prefix': unique_prefix
+ })
+ member_init.append(IMPL_MEMBER_INIT_TEMPLATE % {
+ 'function_name': fn,
+ 'unique_prefix': unique_prefix
+ })
+ member_cleanup.append(IMPL_MEMBER_CLEANUP_TEMPLATE % {
+ 'function_name': fn,
+ 'unique_prefix': unique_prefix
+ })
+
+ header = options.header
+ if options.link_directly == 0 and options.bundled_header:
+ header = options.bundled_header
+ wrapped_header_include = '#include %s\n' % header
+
+  # Some libraries (e.g. libpci) have headers that cannot be included
+  # without extern "C"; otherwise they cause the link to fail.
+ # TODO(phajdan.jr): This is a workaround for broken headers. Remove it.
+ if options.use_extern_c:
+ wrapped_header_include = 'extern "C" {\n%s\n}\n' % wrapped_header_include
+
+  # It seems cleaner just to have a single #define here and #ifdefs in a bunch
+  # of places, rather than having a different set of templates, duplicating
+  # or complicating more code.
+ if options.link_directly == 0:
+ wrapped_header_include += '#define %s_DLOPEN\n' % unique_prefix
+ elif options.link_directly == 1:
+ wrapped_header_include += '#define %s_DT_NEEDED\n' % unique_prefix
+ else:
+ parser.error('Invalid value for --link-directly. Should be 0 or 1.')
+
+  # Make it easier for people to find the code generator, just in case.
+  # Doing it this way is more maintainable, because it's going to work
+  # even if the file gets moved without updating its contents.
+ generator_path = os.path.relpath(__file__, source_tree_root)
+
+ header_contents = HEADER_TEMPLATE % {
+ 'generator_path': generator_path,
+ 'unique_prefix': unique_prefix,
+ 'wrapped_header_include': wrapped_header_include,
+ 'class_name': options.name,
+ 'member_decls': ''.join(member_decls),
+ }
+
+ impl_contents = IMPL_TEMPLATE % {
+ 'generator_path': generator_path,
+ 'unique_prefix': unique_prefix,
+ 'generated_header_name': options.output_h,
+ 'class_name': options.name,
+ 'member_init': ''.join(member_init),
+ 'member_cleanup': ''.join(member_cleanup),
+ }
+
+ header_file = open(os.path.join(source_tree_root, options.output_h), 'w')
+ try:
+ header_file.write(header_contents)
+ finally:
+ header_file.close()
+
+ impl_file = open(os.path.join(source_tree_root, options.output_cc), 'w')
+ try:
+ impl_file.write(impl_contents)
+ finally:
+ impl_file.close()
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
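Taken together with the GN template earlier in this change, the script is
driven by a command line of roughly the following shape. Everything below
(class name, paths, header, and function list) is a made-up example of the
contract defined by the option parser above, not an invocation that appears in
this change:

    import subprocess

    subprocess.check_call([
        'python', 'tools/generate_library_loader/generate_library_loader.py',
        '--name', 'LibFooLoader',                  # generated C++ class name
        '--output-h', 'out/gen/library_loaders/libfoo.h',
        '--output-cc', 'out/gen/library_loaders/libfoo.cc',
        '--header', '<foo.h>',                     # header wrapped by the loader
        '--link-directly=0',                       # 0 = dlopen, 1 = DT_NEEDED
        'foo_open', 'foo_close',                   # functions to expose
    ])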
diff --git a/tools/git/README b/tools/git/README
new file mode 100644
index 0000000..7f8e363
--- /dev/null
+++ b/tools/git/README
@@ -0,0 +1,16 @@
+This directory contains some helpful Git tools.
+
+post-checkout and post-merge
+============================
+These hooks warn you about DEPS modifications so you will remember
+to run "gclient sync".
+
+To install these Git hooks, create symlinks like so:
+ ln -s $(pwd)/post-checkout $(git rev-parse --git-dir)/hooks
+ ln -s $(pwd)/post-merge $(git rev-parse --git-dir)/hooks
+
+
+git-graph
+=========
+Creates a graph of the recent history of occurrences of a grep
+expression in the project.
diff --git a/tools/git/for-all-touched-files.py b/tools/git/for-all-touched-files.py
new file mode 100755
index 0000000..a7e784a
--- /dev/null
+++ b/tools/git/for-all-touched-files.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+ Invokes the specified (quoted) command for all files modified
+ between the current git branch and the specified branch or commit.
+
+ The special token [[FILENAME]] (or whatever you choose using the -t
+ flag) is replaced with each of the filenames of new or modified files.
+
+ Deleted files are not included. Neither are untracked files.
+
+Synopsis:
+ %prog [-b BRANCH] [-d] [-x EXTENSIONS|-c] [-t TOKEN] QUOTED_COMMAND
+
+Examples:
+ %prog -x gyp,gypi "tools/format_xml.py [[FILENAME]]"
+ %prog -c "tools/sort-headers.py [[FILENAME]]"
+ %prog -t "~~BINGO~~" "echo I modified ~~BINGO~~"
+"""
+
+import optparse
+import os
+import subprocess
+import sys
+
+
+# List of C++-like source file extensions.
+_CPP_EXTENSIONS = ('h', 'hh', 'hpp', 'c', 'cc', 'cpp', 'cxx', 'mm',)
+
+
+def GitShell(args, ignore_return=False):
+ """A shell invocation suitable for communicating with git. Returns
+ output as list of lines, raises exception on error.
+ """
+ job = subprocess.Popen(args,
+ shell=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ (out, err) = job.communicate()
+ if job.returncode != 0 and not ignore_return:
+ print out
+ raise Exception("Error %d running command %s" % (
+ job.returncode, args))
+ return out.split('\n')
+
+
+def FilenamesFromGit(branch_name, extensions):
+ """Provides a list of all new and modified files listed by [git diff
+ branch_name] where branch_name can be blank to get a diff of the
+ workspace.
+
+ Excludes deleted files.
+
+ If extensions is not an empty list, include only files with one of
+ the extensions on the list.
+ """
+ lines = GitShell('git diff --stat=600,500 %s' % branch_name)
+ filenames = []
+ for line in lines:
+ line = line.lstrip()
+ # Avoid summary line, and files that have been deleted (no plus).
+ if line.find('|') != -1 and line.find('+') != -1:
+ filename = line.split()[0]
+ if filename:
+ filename = filename.rstrip()
+ ext = filename.rsplit('.')[-1]
+ if not extensions or ext in extensions:
+ filenames.append(filename)
+ return filenames
+
+
+def ForAllTouchedFiles(branch_name, extensions, token, command):
+ """For each new or modified file output by [git diff branch_name],
+ run command with token replaced with the filename. If extensions is
+ not empty, do this only for files with one of the extensions in that
+ list.
+ """
+ filenames = FilenamesFromGit(branch_name, extensions)
+ for filename in filenames:
+ os.system(command.replace(token, filename))
+
+
+def main():
+ parser = optparse.OptionParser(usage=__doc__)
+ parser.add_option('-x', '--extensions', default='', dest='extensions',
+ help='Limits to files with given extensions '
+ '(comma-separated).')
+ parser.add_option('-c', '--cpp', default=False, action='store_true',
+ dest='cpp_only',
+ help='Runs your command only on C++-like source files.')
+ parser.add_option('-t', '--token', default='[[FILENAME]]', dest='token',
+ help='Sets the token to be replaced for each file '
+ 'in your command (default [[FILENAME]]).')
+ parser.add_option('-b', '--branch', default='origin/master', dest='branch',
+ help='Sets what to diff to (default origin/master). Set '
+ 'to empty to diff workspace against HEAD.')
+ opts, args = parser.parse_args()
+
+ if not args:
+ parser.print_help()
+ sys.exit(1)
+
+ extensions = opts.extensions
+ if opts.cpp_only:
+ extensions = _CPP_EXTENSIONS
+
+ ForAllTouchedFiles(opts.branch, extensions, opts.token, args[0])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/git/git-diff-ide.py b/tools/git/git-diff-ide.py
new file mode 100755
index 0000000..405d270
--- /dev/null
+++ b/tools/git/git-diff-ide.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+ Invokes git diff [args...] and inserts file:line in front of each line of diff
+ output where possible.
+
+ This is useful from an IDE that allows you to double-click lines that begin
+ with file:line to open and jump to that point in the file.
+
+Synopsis:
+ %prog [git diff args...]
+
+Examples:
+ %prog
+ %prog HEAD
+"""
+
+import subprocess
+import sys
+
+
+def GitShell(args, ignore_return=False):
+ """A shell invocation suitable for communicating with git. Returns
+ output as list of lines, raises exception on error.
+ """
+ job = subprocess.Popen(args,
+ shell=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ (out, err) = job.communicate()
+ if job.returncode != 0 and not ignore_return:
+ print out
+ raise Exception("Error %d running command %s" % (
+ job.returncode, args))
+ return out.split('\n')
+
+
+def PrintGitDiff(extra_args):
+ """Outputs git diff extra_args with file:line inserted into relevant lines."""
+  current_file = ''
+  line_num = 0
+ lines = GitShell('git diff %s' % ' '.join(extra_args))
+ for line in lines:
+ # Pass-through lines:
+ # diff --git a/file.c b/file.c
+ # index 0e38c2d..8cd69ae 100644
+ # --- a/file.c
+ if (line.startswith('diff ') or
+ line.startswith('index ') or
+ line.startswith('--- ')):
+ print line
+ continue
+
+ # Get the filename from the +++ line:
+ # +++ b/file.c
+ if line.startswith('+++ '):
+ # Filename might be /dev/null or a/file or b/file.
+ # Skip the first two characters unless it starts with /.
+ current_file = line[4:] if line[4] == '/' else line[6:]
+ print line
+ continue
+
+ # Update line number from the @@ lines:
+ # @@ -41,9 +41,9 @@ def MyFunc():
+ # ^^
+ if line.startswith('@@ '):
+ _, old_nr, new_nr, _ = line.split(' ', 3)
+ line_num = int(new_nr.split(',')[0])
+ print line
+ continue
+ print current_file + ':' + repr(line_num) + ':' + line
+
+ # Increment line number for lines that start with ' ' or '+':
+ # @@ -41,4 +41,4 @@ def MyFunc():
+ # file.c:41: // existing code
+ # file.c:42: // existing code
+ # file.c:43:-// deleted code
+ # file.c:43:-// deleted code
+ # file.c:43:+// inserted code
+ # file.c:44:+// inserted code
+ if line.startswith(' ') or line.startswith('+'):
+ line_num += 1
+
+
+def main():
+ PrintGitDiff(sys.argv[1:])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/git/git-utils.sh b/tools/git/git-utils.sh
new file mode 100755
index 0000000..608d27a
--- /dev/null
+++ b/tools/git/git-utils.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TPUT=$(which tput 2>/dev/null)
+if test -x "$TPUT" && $TPUT setaf 1 >/dev/null ; then
+ RED="$($TPUT setaf 1)"
+ NORMAL="$($TPUT op)"
+else
+ RED=
+ NORMAL=
+fi
+
+warn() {
+ echo "${RED}WARNING:${NORMAL} $@"
+}
diff --git a/tools/git/graph.sh b/tools/git/graph.sh
new file mode 100755
index 0000000..800a52b
--- /dev/null
+++ b/tools/git/graph.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+about="Given a grep expression, creates a graph of occurrences of that
+expression in the recent history of the tree.
+
+Prerequisites: git and GNU R (apt-get install r-base).
+"
+
+set -e
+
+target="$1"
+
+if [ -z "$target" ]; then
+ echo "usage: $0 <grep-compatible expression>"
+ echo
+ echo "$about"
+ exit 1
+fi
+
+datafile=$(mktemp -t tmp.XXXXXXXXXX)
+trap "rm -f $datafile" EXIT
+
+echo 'ago count' > $datafile
+for ago in {90..0}; do
+ commit=$(git rev-list -1 --until="$ago days ago" origin/trunk)
+ git checkout -q -f $commit
+ count=$(git grep -E "$target" -- '*.cc' '*.h' '*.m' '*.mm' | wc -l)
+ echo "-$ago $count" >> $datafile
+ echo -n '.'
+done
+
+R CMD BATCH <(cat <<EOF
+data = read.delim("$datafile", sep=' ')
+png(width=600, height=300)
+plot(count ~ ago, type="l", main="$target", xlab='days ago', data=data)
+EOF
+) /dev/null
+
+echo done. # Primarily to add a newline after all the dots.
diff --git a/tools/git/mass-rename.py b/tools/git/mass-rename.py
new file mode 100755
index 0000000..21fbef7
--- /dev/null
+++ b/tools/git/mass-rename.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+mass-rename: update source files (gyp lists, #includes) to reflect
+a rename. Expects "git diff --cached -M" to list a bunch of renames.
+
+To use:
+ 1) git mv foo1 bar1; git mv foo2 bar2; etc.
+ 2) *without committing*, ./tools/git/mass-rename.py
+ 3) look at git diff (without --cached) to see what the damage is
+"""
+
+
+import os
+import subprocess
+import sys
+
+
+BASE_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+def main():
+ popen = subprocess.Popen('git diff --cached --raw -M',
+ shell=True, stdout=subprocess.PIPE)
+ out, _ = popen.communicate()
+ if popen.returncode != 0:
+ return 1
+ for line in out.splitlines():
+ parts = line.split('\t')
+ if len(parts) != 3:
+ print 'Skipping: %s -- not a rename?' % parts
+ continue
+ attrs, fro, to = parts
+ if attrs.split()[4].startswith('R'):
+ subprocess.check_call([
+ sys.executable,
+ os.path.join(BASE_DIR, 'move_source_file.py'),
+ '--already_moved',
+ '--no_error_for_non_source_file',
+ fro, to])
+ else:
+ print 'Skipping: %s -- not a rename?' % fro
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
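The parsing in main() relies on the layout of "git diff --cached --raw -M"
output: tab-separated columns, with the rename status in the fifth
space-separated field of the first column. A made-up line, walked through the
same steps:

    # Hypothetical raw rename entry (SHA-1s abbreviated and fabricated):
    line = ':100644 100644 0123456 89abcde R095\ttools/git/old.py\ttools/git/new.py'
    attrs, fro, to = line.split('\t')
    print(attrs.split()[4])   # 'R095': a rename detected at 95% similarity
    # fro == 'tools/git/old.py', to == 'tools/git/new.py'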
diff --git a/tools/git/mass-rename.sh b/tools/git/mass-rename.sh
new file mode 100755
index 0000000..f92814e
--- /dev/null
+++ b/tools/git/mass-rename.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# mass-rename: update source files (gyp lists, #includes) to reflect
+# a rename. Expects "git diff --cached -M" to list a bunch of renames.
+#
+# To use:
+# 1) git mv foo1 bar1; git mv foo2 bar2; etc.
+# 2) *without committing*, ./tools/git/mass-rename.sh
+# 3) look at git diff (without --cached) to see what the damage is
+# 4) commit, then use tools/sort-headers.py to fix #include ordering:
+# for f in $(git diff --name-only origin); do ./tools/sort-headers.py $f; done
+
+DIR="$( cd "$( dirname "$0" )" && pwd )"
+python $DIR/mass-rename.py "$*"
diff --git a/tools/git/mffr.py b/tools/git/mffr.py
new file mode 100755
index 0000000..d5b67c8
--- /dev/null
+++ b/tools/git/mffr.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: mffr.py [-d] [-g *.h] [-g *.cc] REGEXP REPLACEMENT
+
+This tool performs a fast find-and-replace operation on files in
+the current git repository.
+
+The -d flag selects a default set of globs (C++ and Objective-C/C++
+source files). The -g flag adds a single glob to the list and may
+be used multiple times. If neither -d nor -g is specified, the tool
+searches all files (*.*).
+
+REGEXP uses full Python regexp syntax. REPLACEMENT can use
+back-references.
+"""
+
+import optparse
+import re
+import subprocess
+import sys
+
+
+# We need to use shell=True with subprocess on Windows so that it
+# finds 'git' from the path, but doing so can lead to undesired
+# behavior on Linux.
+_USE_SHELL = (sys.platform == 'win32')
+
+
+def MultiFileFindReplace(original, replacement, file_globs):
+ """Implements fast multi-file find and replace.
+
+ Given an |original| string and a |replacement| string, find matching
+ files by running git grep on |original| in files matching any
+ pattern in |file_globs|.
+
+ Once files are found, |re.sub| is run to replace |original| with
+ |replacement|. |replacement| may use capture group back-references.
+
+ Args:
+ original: '(#(include|import)\s*["<])chrome/browser/ui/browser.h([>"])'
+ replacement: '\1chrome/browser/ui/browser/browser.h\3'
+ file_globs: ['*.cc', '*.h', '*.m', '*.mm']
+
+ Returns the list of files modified.
+
+ Raises an exception on error.
+ """
+ # Posix extended regular expressions do not reliably support the "\s"
+ # shorthand.
+ posix_ere_original = re.sub(r"\\s", "[[:space:]]", original)
+ if sys.platform == 'win32':
+ posix_ere_original = posix_ere_original.replace('"', '""')
+ out, err = subprocess.Popen(
+ ['git', 'grep', '-E', '--name-only', posix_ere_original,
+ '--'] + file_globs,
+ stdout=subprocess.PIPE,
+ shell=_USE_SHELL).communicate()
+ referees = out.splitlines()
+
+ for referee in referees:
+ with open(referee) as f:
+ original_contents = f.read()
+ contents = re.sub(original, replacement, original_contents)
+ if contents == original_contents:
+ raise Exception('No change in file %s although matched in grep' %
+ referee)
+ with open(referee, 'wb') as f:
+ f.write(contents)
+
+ return referees
+
+
+def main():
+ parser = optparse.OptionParser(usage='''
+(1) %prog <options> REGEXP REPLACEMENT
+REGEXP uses full Python regexp syntax. REPLACEMENT can use back-references.
+
+(2) %prog <options> -i <file>
+<file> should contain a list (in Python syntax) of
+[REGEXP, REPLACEMENT, [GLOBS]] lists, e.g.:
+[
+ [r"(foo|bar)", r"\1baz", ["*.cc", "*.h"]],
+ ["54", "42"],
+]
+As shown above, [GLOBS] can be omitted for a given search-replace list, in which
+case the corresponding search-replace will use the globs specified on the
+command line.''')
+ parser.add_option('-d', action='store_true',
+ dest='use_default_glob',
+ help='Perform the change on C++ and Objective-C(++) source '
+ 'and header files.')
+ parser.add_option('-f', action='store_true',
+ dest='force_unsafe_run',
+ help='Perform the run even if there are uncommitted local '
+ 'changes.')
+ parser.add_option('-g', action='append',
+ type='string',
+ default=[],
+ metavar="<glob>",
+ dest='user_supplied_globs',
+ help='Perform the change on the specified glob. Can be '
+ 'specified multiple times, in which case the globs are '
+ 'unioned.')
+ parser.add_option('-i', "--input_file",
+ type='string',
+ action='store',
+ default='',
+ metavar="<file>",
+ dest='input_filename',
+ help='Read arguments from <file> rather than the command '
+ 'line. NOTE: To be sure of regular expressions being '
+ 'interpreted correctly, use raw strings.')
+ opts, args = parser.parse_args()
+ if opts.use_default_glob and opts.user_supplied_globs:
+ print '"-d" and "-g" cannot be used together'
+ parser.print_help()
+ return 1
+
+ from_file = opts.input_filename != ""
+ if (from_file and len(args) != 0) or (not from_file and len(args) != 2):
+ parser.print_help()
+ return 1
+
+ if not opts.force_unsafe_run:
+ out, err = subprocess.Popen(['git', 'status', '--porcelain'],
+ stdout=subprocess.PIPE,
+ shell=_USE_SHELL).communicate()
+ if out:
+ print 'ERROR: This tool does not print any confirmation prompts,'
+ print 'so you should only run it with a clean staging area and cache'
+ print 'so that reverting a bad find/replace is as easy as running'
+ print ' git checkout -- .'
+ print ''
+ print 'To override this safeguard, pass the -f flag.'
+ return 1
+
+ global_file_globs = ['*.*']
+ if opts.use_default_glob:
+ global_file_globs = ['*.cc', '*.h', '*.m', '*.mm']
+ elif opts.user_supplied_globs:
+ global_file_globs = opts.user_supplied_globs
+
+ # Construct list of search-replace tasks.
+ search_replace_tasks = []
+ if opts.input_filename == '':
+ original = args[0]
+ replacement = args[1]
+ search_replace_tasks.append([original, replacement, global_file_globs])
+ else:
+ f = open(opts.input_filename)
+ search_replace_tasks = eval("".join(f.readlines()))
+ for task in search_replace_tasks:
+ if len(task) == 2:
+ task.append(global_file_globs)
+ f.close()
+
+ for (original, replacement, file_globs) in search_replace_tasks:
+ print 'File globs: %s' % file_globs
+ print 'Original: %s' % original
+ print 'Replacement: %s' % replacement
+ MultiFileFindReplace(original, replacement, file_globs)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
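Besides the command-line entry point, MultiFileFindReplace is also importable
as a library; move_source_file.py below uses it exactly this way. A minimal
hypothetical call (the pattern and globs are illustrative only):

    import mffr

    # Must be run inside a git checkout: the function shells out to
    # "git grep" to find candidate files.
    modified = mffr.MultiFileFindReplace(
        r'OldHelperClass', r'NewHelperClass', ['*.cc', '*.h'])
    print(modified)   # files rewritten in place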
diff --git a/tools/git/move_source_file.bat b/tools/git/move_source_file.bat
new file mode 100755
index 0000000..bc3d797
--- /dev/null
+++ b/tools/git/move_source_file.bat
@@ -0,0 +1,6 @@
+@echo off
+setlocal
+:: This is required with cygwin only.
+PATH=%~dp0;%PATH%
+set PYTHONDONTWRITEBYTECODE=1
+call python "%~dp0move_source_file.py" %*
diff --git a/tools/git/move_source_file.py b/tools/git/move_source_file.py
new file mode 100755
index 0000000..a60c7e1
--- /dev/null
+++ b/tools/git/move_source_file.py
@@ -0,0 +1,194 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Moves C++ files to a new location, updating any include paths that point
+to them, and re-ordering headers as needed. If multiple source files are
+specified, the destination must be a directory. Updates include guards in
+moved header files. Assumes Chromium coding style.
+
+Attempts to update paths used in .gyp(i) files, but does not reorder
+or restructure .gyp(i) files in any way.
+
+Updates full-path references to files in // comments in source files.
+
+Must run in a git checkout, as it relies on git grep for a fast way to
+find files that reference the moved file.
+"""
+
+
+import optparse
+import os
+import re
+import subprocess
+import sys
+
+import mffr
+
+if __name__ == '__main__':
+  # Need to add the directory containing sort-headers.py to the Python
+  # module search path.
+ sys.path.append(os.path.abspath(os.path.join(sys.path[0], '..')))
+sort_headers = __import__('sort-headers')
+
+
+HANDLED_EXTENSIONS = ['.cc', '.mm', '.h', '.hh']
+
+
+def IsHandledFile(path):
+ return os.path.splitext(path)[1] in HANDLED_EXTENSIONS
+
+
+def MakeDestinationPath(from_path, to_path):
+ """Given the from and to paths, return a correct destination path.
+
+  The initial destination path may be either a full path or a directory.
+ Also does basic sanity checks.
+ """
+ if not IsHandledFile(from_path):
+ raise Exception('Only intended to move individual source files '
+ '(%s does not have a recognized extension).' %
+ from_path)
+ if os.path.isdir(to_path):
+ to_path = os.path.join(to_path, os.path.basename(from_path))
+ else:
+ dest_extension = os.path.splitext(to_path)[1]
+ if dest_extension not in HANDLED_EXTENSIONS:
+ raise Exception('Destination must be either a full path with '
+ 'a recognized extension or a directory.')
+ return to_path
+
+
+def MoveFile(from_path, to_path):
+ """Performs a git mv command to move a file from |from_path| to |to_path|.
+ """
+ if not os.system('git mv %s %s' % (from_path, to_path)) == 0:
+ raise Exception('Fatal: Failed to run git mv command.')
+
+
+def UpdatePostMove(from_path, to_path):
+ """Given a file that has moved from |from_path| to |to_path|,
+ updates the moved file's include guard to match the new path and
+ updates all references to the file in other source files. Also tries
+ to update references in .gyp(i) files using a heuristic.
+ """
+ # Include paths always use forward slashes.
+ from_path = from_path.replace('\\', '/')
+ to_path = to_path.replace('\\', '/')
+
+ if os.path.splitext(from_path)[1] in ['.h', '.hh']:
+ UpdateIncludeGuard(from_path, to_path)
+
+ # Update include/import references.
+ files_with_changed_includes = mffr.MultiFileFindReplace(
+ r'(#(include|import)\s*["<])%s([>"])' % re.escape(from_path),
+ r'\1%s\3' % to_path,
+ ['*.cc', '*.h', '*.m', '*.mm'])
+
+ # Reorder headers in files that changed.
+ for changed_file in files_with_changed_includes:
+ def AlwaysConfirm(a, b): return True
+ sort_headers.FixFileWithConfirmFunction(changed_file, AlwaysConfirm, True)
+
+ # Update comments; only supports // comments, which are primarily
+ # used in our code.
+ #
+ # This work takes a bit of time. If this script starts feeling too
+ # slow, one good way to speed it up is to make the comment handling
+ # optional under a flag.
+ mffr.MultiFileFindReplace(
+ r'(//.*)%s' % re.escape(from_path),
+ r'\1%s' % to_path,
+ ['*.cc', '*.h', '*.m', '*.mm'])
+
+ # Update references in .gyp(i) files.
+ def PathMinusFirstComponent(path):
+ """foo/bar/baz -> bar/baz"""
+ parts = re.split(r"[/\\]", path, 1)
+ if len(parts) == 2:
+ return parts[1]
+ else:
+ return parts[0]
+ mffr.MultiFileFindReplace(
+ r'([\'"])%s([\'"])' % re.escape(PathMinusFirstComponent(from_path)),
+ r'\1%s\2' % PathMinusFirstComponent(to_path),
+ ['*.gyp*'])
+
+
+def MakeIncludeGuardName(path_from_root):
+ """Returns an include guard name given a path from root."""
+ guard = path_from_root.replace('/', '_')
+ guard = guard.replace('\\', '_')
+ guard = guard.replace('.', '_')
+ guard += '_'
+ return guard.upper()
+
+
+def UpdateIncludeGuard(old_path, new_path):
+ """Updates the include guard in a file now residing at |new_path|,
+ previously residing at |old_path|, with an up-to-date include guard.
+
+ Prints a warning if the update could not be completed successfully (e.g.,
+ because the old include guard was not formatted correctly per Chromium style).
+ """
+ old_guard = MakeIncludeGuardName(old_path)
+ new_guard = MakeIncludeGuardName(new_path)
+
+ with open(new_path) as f:
+ contents = f.read()
+
+ new_contents = contents.replace(old_guard, new_guard)
+ # The file should now have three instances of the new guard: two at the top
+ # of the file plus one at the bottom for the comment on the #endif.
+ if new_contents.count(new_guard) != 3:
+ print ('WARNING: Could not successfully update include guard; perhaps '
+ 'old guard is not per style guide? You will have to update the '
+ 'include guard manually. (%s)' % new_path)
+
+ with open(new_path, 'w') as f:
+ f.write(new_contents)
+
+def main():
+ if not os.path.isdir('.git'):
+ print 'Fatal: You must run from the root of a git checkout.'
+ return 1
+
+ parser = optparse.OptionParser(usage='%prog FROM_PATH... TO_PATH')
+ parser.add_option('--already_moved', action='store_true',
+ dest='already_moved',
+ help='Causes the script to skip moving the file.')
+  parser.add_option('--no_error_for_non_source_file', action='store_false',
+                    default=True,
+                    dest='error_for_non_source_file',
+ help='Causes the script to simply print a warning on '
+ 'encountering a non-source file rather than raising an '
+ 'error.')
+ opts, args = parser.parse_args()
+
+ if len(args) < 2:
+ parser.print_help()
+ return 1
+
+ from_paths = args[:len(args)-1]
+ orig_to_path = args[-1]
+
+ if len(from_paths) > 1 and not os.path.isdir(orig_to_path):
+ print 'Target %s is not a directory.' % orig_to_path
+ print
+ parser.print_help()
+ return 1
+
+ for from_path in from_paths:
+ if not opts.error_for_non_source_file and not IsHandledFile(from_path):
+ print '%s does not appear to be a source file, skipping' % (from_path)
+ continue
+ to_path = MakeDestinationPath(from_path, orig_to_path)
+ if not opts.already_moved:
+ MoveFile(from_path, to_path)
+ UpdatePostMove(from_path, to_path)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
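The include-guard rewriting is easiest to see on a concrete (hypothetical)
move; the paths below are examples only:

    # Using the helper defined above, for a made-up header move:
    print(MakeIncludeGuardName('chrome/browser/foo_bar.h'))
    # -> CHROME_BROWSER_FOO_BAR_H_
    print(MakeIncludeGuardName('chrome/browser/ui/foo_bar.h'))
    # -> CHROME_BROWSER_UI_FOO_BAR_H_
    # UpdateIncludeGuard() then swaps the old guard for the new one in the
    # moved file, warning unless the new guard ends up appearing exactly
    # three times (#ifndef, #define, and the #endif comment).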
diff --git a/tools/git/post-checkout b/tools/git/post-checkout
new file mode 100755
index 0000000..452eb48
--- /dev/null
+++ b/tools/git/post-checkout
@@ -0,0 +1,22 @@
+#!/bin/bash
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+script=$(readlink $0)
+source $(dirname ${script:-$0})/git-utils.sh
+
+old_ref=$1 # Previous HEAD.
+new_ref=$2 # Current HEAD.
+branch_switch=$3 # Whether we switched branches.
+
+if [ "$old_ref" == "$new_ref" ]; then
+ if ! git diff-index --quiet HEAD $(git rev-parse --show-cdup)DEPS; then
+ warn "DEPS has local modifications; do you need to re-run gclient sync?"
+ fi
+else
+ if git diff-tree $old_ref $new_ref | grep -qs $'\tDEPS$'; then
+ warn "DEPS has changed; you probably need to re-run gclient sync."
+ fi
+fi
+
diff --git a/tools/git/post-merge b/tools/git/post-merge
new file mode 100755
index 0000000..8b774ce
--- /dev/null
+++ b/tools/git/post-merge
@@ -0,0 +1,12 @@
+#!/bin/bash
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+script=$(readlink $0)
+source $(dirname ${script:-$0})/git-utils.sh
+
+if git diff-tree ORIG_HEAD HEAD | grep -qs $'\tDEPS$'; then
+ warn "DEPS has changed; you probably need to re-run gclient sync."
+fi
+
diff --git a/tools/git/update-copyrights.sh b/tools/git/update-copyrights.sh
new file mode 100755
index 0000000..ac69bd5
--- /dev/null
+++ b/tools/git/update-copyrights.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+echo 'Updating copyrights is no longer necessary.'
+echo 'See https://groups.google.com/a/chromium.org/d/msg/chromium-dev/8p4JKV76kig/OiFYFjuZ6nAJ'
diff --git a/tools/gritsettings/resource_ids b/tools/gritsettings/resource_ids
new file mode 100644
index 0000000..c4a6337
--- /dev/null
+++ b/tools/gritsettings/resource_ids
@@ -0,0 +1,236 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This file is used to assign starting resource ids for resources and strings
+# used by Chromium. This is done to ensure that resource ids are unique
+# across all the grd files. If you are adding a new grd file, please add
+# a new entry to this file.
+#
+# The first entry in the file, SRCDIR, is special: It is a relative path from
+# this file to the base of your checkout.
+#
+# The range of ID values, which is used by pak files, is from 0 to 2^16 - 1.
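+#
+# For example, a new file claims a block of ids with an entry of the form
+# below (hypothetical path; choose a start value that does not collide with
+# the ranges assigned in this file):
+#   "chrome/browser/resources/example_resources.grd": {
+#     "includes": [<start id>],
+#   },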
+{
+ "SRCDIR": "../..",
+
+ "chrome/browser/browser_resources.grd": {
+ "includes": [400],
+ "structures": [750],
+ },
+ "chrome/browser/resources/component_extension_resources.grd": {
+ "includes": [1000],
+ "structures": [1450],
+ },
+ "chrome/browser/resources/net_internals_resources.grd": {
+ "includes": [1500],
+ },
+ "ui/webui/resources/webui_resources.grd": {
+ "includes": [2000],
+ "structures": [2200],
+ },
+ "chrome/common/common_resources.grd": {
+ "includes": [2500],
+ },
+ "chrome/renderer/resources/renderer_resources.grd": {
+ "includes": [3500],
+ "structures": [3700],
+ },
+ "net/base/net_resources.grd": {
+ "includes": [4000],
+ },
+ "ui/resources/ui_unscaled_resources.grd": {
+ "includes": [4500],
+ },
+ "content/app/resources/content_resources.grd": {
+ "structures": [4700],
+ },
+ "ui/resources/ui_resources.grd": {
+ "structures": [5500],
+ },
+ "ash/resources/ash_resources.grd": {
+ "includes": [6100],
+ "structures": [6150],
+ },
+ "athena/resources/athena_resources.grd": {
+ "structures": [6400],
+ },
+ "athena/strings/athena_strings.grd": {
+ "messages": [6500],
+ },
+ "chrome/app/theme/theme_resources.grd": {
+ "structures": [7000],
+ },
+ "chrome/app/theme/chrome_unscaled_resources.grd": {
+ "includes": [8000],
+ },
+ "ui/strings/app_locale_settings.grd": {
+ "messages": [9000],
+ },
+ "chrome/app/resources/locale_settings.grd": {
+ "includes": [9500],
+ "messages": [10000],
+ },
+ # These each start with the same resource id because we only use one
+ # file for each build (chromiumos, google_chromeos, linux, mac, or win).
+ "chrome/app/resources/locale_settings_chromiumos.grd": {
+ "messages": [10500],
+ },
+ "chrome/app/resources/locale_settings_google_chromeos.grd": {
+ "messages": [10500],
+ },
+ "chrome/app/resources/locale_settings_linux.grd": {
+ "messages": [10500],
+ },
+ "chrome/app/resources/locale_settings_mac.grd": {
+ "messages": [10500],
+ },
+ "chrome/app/resources/locale_settings_win.grd": {
+ "messages": [10500],
+ },
+ "ui/strings/ui_strings.grd": {
+ "messages": [11000],
+ },
+ # Chromium strings and Google Chrome strings must start at the same id.
+ # We only use one file depending on whether we're building Chromium or
+ # Google Chrome.
+ "chrome/app/chromium_strings.grd": {
+ "messages": [11500],
+ },
+ "chrome/app/google_chrome_strings.grd": {
+ "messages": [11500],
+ },
+ # Leave lots of space for generated_resources since it has most of our
+ # strings.
+ "chrome/app/generated_resources.grd": {
+ "structures": [12000],
+ "messages": [12500],
+ },
+ "content/app/strings/content_strings.grd": {
+ "messages": [18500],
+ },
+ "components/policy/resources/policy_templates.grd": {
+ "structures": [20500],
+ "messages": [20510],
+ },
+ "chrome/browser/resources/sync_internals_resources.grd": {
+ "includes": [21500],
+ },
+ "chrome/browser/resources/signin_internals_resources.grd": {
+ "includes": [21750],
+ },
+ "chrome/browser/resources/invalidations_resources.grd": {
+ "includes": [21950],
+ },
+ # This file is generated during the build.
+ "<(SHARED_INTERMEDIATE_DIR)/devtools/devtools_resources.grd": {
+ "includes": [22000],
+ },
+ "devtools_resources.grd": {
+ "includes": [22000],
+ },
+ "chrome/browser/resources/options_resources.grd": {
+ "includes": [23000],
+ "structures": [23200],
+ },
+ "cloud_print/virtual_driver/win/install/virtual_driver_setup_resources.grd": {
+ "messages": [23500],
+ "includes": [23550],
+ },
+ "cloud_print/service/win/service_resources.grd": {
+ "messages": [23600],
+ "includes": [23700],
+ "structures": [23750],
+ },
+ "cloud_print/gcp20/prototype/gcp20_device.grd": {
+ "messages": [23800],
+ "includes": [23830],
+ "structures": [23860],
+ },
+ "chrome/browser/resources/quota_internals_resources.grd": {
+ "includes": [24000],
+ },
+ "content/content_resources.grd": {
+ "includes": [25000],
+ },
+ "content/shell/shell_resources.grd": {
+ "includes": [25500],
+ },
+ # This file is generated during the build.
+ "<(SHARED_INTERMEDIATE_DIR)/content/browser/tracing/tracing_resources.grd": {
+ "includes": [25750],
+ },
+ "ash/ash_strings.grd": {
+ "messages": [26000],
+ },
+ "ui/chromeos/resources/ui_chromeos_resources.grd": {
+ "structures": [26200],
+ },
+ "ui/chromeos/ui_chromeos_strings.grd": {
+ "messages": [26300],
+ },
+ "chrome/common/extensions_api_resources.grd": {
+ "includes": [26400],
+ },
+ "extensions/extensions_resources.grd": {
+ "includes": [26600],
+ },
+ "extensions/browser/resources/extensions_browser_resources.grd": {
+ "structures": [26800],
+ },
+ "extensions/renderer/resources/extensions_renderer_resources.grd": {
+ "includes": [26850],
+ "structures": [26950],
+ },
+ "extensions/extensions_strings.grd": {
+ "messages": [27000],
+ },
+ "chrome/browser/resources/memory_internals_resources.grd": {
+ "includes": [27500],
+ },
+ "chrome/browser/resources/password_manager_internals_resources.grd": {
+ "includes": [27800],
+ },
+ "device/bluetooth/bluetooth_strings.grd": {
+ "messages": [28000],
+ },
+ "ui/keyboard/keyboard_resources.grd": {
+ "includes": [28050],
+ },
+ "ui/file_manager/file_manager_resources.grd": {
+ "includes": [28100],
+ },
+ "ui/login/login_resources.grd": {
+ "includes": [28300],
+ },
+ "chrome/browser/resources/translate_internals_resources.grd": {
+ "includes": [28500],
+ },
+ "chrome/browser/resources/sync_file_system_internals_resources.grd": {
+ "includes": [29000],
+ },
+ "chrome/app/address_input_strings.grd": {
+ "messages": [29100],
+ },
+ "components/components_strings.grd": {
+ "messages": [30000],
+ },
+ "components/resources/components_resources.grd": {
+ "includes": [30250],
+ },
+ "components/resources/components_scaled_resources.grd": {
+ "structures": [30350],
+ },
+ "third_party/WebKit/public/blink_resources.grd": {
+ "includes": [30500],
+ },
+
+ # These files don't need to reserve resource ids, but are listed here so that
+ # translation scripts know of their existence.
+ "android_webview/java/strings/android_webview_strings.grd": {},
+ "chrome/android/java/strings/android_chrome_strings.grd": {},
+ "content/public/android/java/strings/android_content_strings.grd": {},
+ "ui/android/java/strings/android_ui_strings.grd": {},
+
+ # Resource ids starting at 31000 are reserved for projects built on Chromium.
+}
diff --git a/tools/idl_parser/PRESUBMIT.py b/tools/idl_parser/PRESUBMIT.py
new file mode 100644
index 0000000..ff657aa
--- /dev/null
+++ b/tools/idl_parser/PRESUBMIT.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+WHITELIST = [ r'^.+_test\.py$' ]
+
+def CheckChangeOnUpload(input_api, output_api):
+ return input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api, '.', whitelist=WHITELIST)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api, '.', whitelist=WHITELIST)
diff --git a/tools/idl_parser/__init__.py b/tools/idl_parser/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/idl_parser/__init__.py
diff --git a/tools/idl_parser/idl_lexer.py b/tools/idl_parser/idl_lexer.py
new file mode 100755
index 0000000..9c320d6
--- /dev/null
+++ b/tools/idl_parser/idl_lexer.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Lexer for PPAPI IDL
+
+The lexer uses the PLY library to build a tokenizer which understands both
+WebIDL and Pepper tokens.
+
+WebIDL and the WebIDL regular expressions can be found at:
+ http://www.w3.org/TR/2012/CR-WebIDL-20120419/
+PLY can be found at:
+ http://www.dabeaz.com/ply/
+"""
+
+import os.path
+import sys
+
+#
+# Try to load the ply module, if not, then assume it is in the third_party
+# directory.
+#
+try:
+ # Disable lint check which fails to find the ply module.
+ # pylint: disable=F0401
+ from ply import lex
+except ImportError:
+ module_path, module_name = os.path.split(__file__)
+ third_party = os.path.join(module_path, '..', '..', 'third_party')
+ sys.path.append(third_party)
+ # pylint: disable=F0401
+ from ply import lex
+
+#
+# IDL Lexer
+#
+class IDLLexer(object):
+ # 'literals' is a value expected by lex which specifies a list of valid
+ # literal tokens, meaning the token type and token value are identical.
+ literals = r'"*.(){}[],;:=+-/~|&^?<>'
+
+ # 't_ignore' contains ignored characters (spaces and tabs)
+ t_ignore = ' \t'
+
+ # 'tokens' is a value required by lex which specifies the complete list
+ # of valid token types.
+ tokens = [
+ # Data types
+ 'float',
+ 'integer',
+ 'string',
+
+ # Symbol and keywords types
+ 'COMMENT',
+ 'identifier',
+
+ # MultiChar operators
+ 'ELLIPSIS',
+ ]
+
+ # 'keywords' is a map of string to token type. All tokens matching
+  # KEYWORD_OR_SYMBOL are matched against the keywords dictionary to determine
+ # if the token is actually a keyword.
+ keywords = {
+ 'any' : 'ANY',
+ 'attribute' : 'ATTRIBUTE',
+ 'boolean' : 'BOOLEAN',
+ 'byte' : 'BYTE',
+ 'ByteString' : 'BYTESTRING',
+ 'callback' : 'CALLBACK',
+ 'const' : 'CONST',
+ 'creator' : 'CREATOR',
+ 'Date' : 'DATE',
+ 'deleter' : 'DELETER',
+ 'dictionary' : 'DICTIONARY',
+ 'DOMString' : 'DOMSTRING',
+ 'double' : 'DOUBLE',
+ 'enum' : 'ENUM',
+ 'false' : 'FALSE',
+ 'float' : 'FLOAT',
+ 'exception' : 'EXCEPTION',
+ 'getter': 'GETTER',
+ 'implements' : 'IMPLEMENTS',
+ 'Infinity' : 'INFINITY',
+ 'inherit' : 'INHERIT',
+ 'interface' : 'INTERFACE',
+ 'legacycaller' : 'LEGACYCALLER',
+ 'long' : 'LONG',
+ 'Nan' : 'NAN',
+ 'null' : 'NULL',
+ 'object' : 'OBJECT',
+ 'octet' : 'OCTET',
+ 'optional' : 'OPTIONAL',
+ 'or' : 'OR',
+ 'partial' : 'PARTIAL',
+ 'readonly' : 'READONLY',
+ 'RegExp' : 'REGEXP',
+ 'sequence' : 'SEQUENCE',
+ 'serializer' : 'SERIALIZER',
+ 'setter': 'SETTER',
+ 'short' : 'SHORT',
+ 'static' : 'STATIC',
+ 'stringifier' : 'STRINGIFIER',
+ 'typedef' : 'TYPEDEF',
+ 'true' : 'TRUE',
+ 'unsigned' : 'UNSIGNED',
+ 'unrestricted' : 'UNRESTRICTED',
+ 'void' : 'VOID'
+ }
+
+ # Token definitions
+ #
+ # Lex assumes any value or function in the form of 't_<TYPE>' represents a
+ # regular expression where a match will emit a token of type <TYPE>. In the
+ # case of a function, the function is called when a match is made. These
+ # definitions come from WebIDL.
+ #
+ # These need to be methods for lexer construction, despite not using self.
+ # pylint: disable=R0201
+ def t_ELLIPSIS(self, t):
+ r'\.\.\.'
+ return t
+
+ # Regex needs to be in the docstring
+ # pylint: disable=C0301
+ def t_float(self, t):
+ r'-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+)'
+ return t
+
+ def t_integer(self, t):
+ r'-?([1-9][0-9]*|0[Xx][0-9A-Fa-f]+|0[0-7]*)'
+ return t
+
+
+  # A line ending '\n'; we use this to increment the line number.
+ def t_LINE_END(self, t):
+ r'\n+'
+ self.AddLines(len(t.value))
+
+ # We do not process escapes in the IDL strings. Strings are exclusively
+ # used for attributes and enums, and not used as typical 'C' constants.
+ def t_string(self, t):
+ r'"[^"]*"'
+ t.value = t.value[1:-1]
+ self.AddLines(t.value.count('\n'))
+ return t
+
+ # A C or C++ style comment: /* xxx */ or //
+ def t_COMMENT(self, t):
+ r'(/\*(.|\n)*?\*/)|(//.*(\n[ \t]*//.*)*)'
+ self.AddLines(t.value.count('\n'))
+ return t
+
+ # A symbol or keyword.
+ def t_KEYWORD_OR_SYMBOL(self, t):
+ r'_?[A-Za-z][A-Za-z_0-9]*'
+
+ # All non-keywords are assumed to be symbols
+ t.type = self.keywords.get(t.value, 'identifier')
+
+ # We strip leading underscores so that you can specify symbols with the same
+    # value as a keyword (e.g., a dictionary named 'interface').
+ if t.value[0] == '_':
+ t.value = t.value[1:]
+ return t
+
+ def t_ANY_error(self, t):
+ msg = 'Unrecognized input'
+ line = self.Lexer().lineno
+
+ # If that line has not been accounted for, then we must have hit
+ # EoF, so compute the beginning of the line that caused the problem.
+ if line >= len(self.index):
+ # Find the offset in the line of the first word causing the issue
+ word = t.value.split()[0]
+ offs = self.lines[line - 1].find(word)
+ # Add the computed line's starting position
+ self.index.append(self.Lexer().lexpos - offs)
+ msg = 'Unexpected EoF reached after'
+
+ pos = self.Lexer().lexpos - self.index[line]
+ out = self.ErrorMessage(line, pos, msg)
+ sys.stderr.write(out + '\n')
+ self._lex_errors += 1
+
+
+ def AddLines(self, count):
+ # Set the lexer position for the beginning of the next line. In the case
+    # of multiple lines, tokens cannot exist on any of the lines except the
+    # last one, so the recorded values for previous lines are unused. We still
+    # fill the array, however, to make sure the line count is correct.
+ self.Lexer().lineno += count
+ for _ in range(count):
+ self.index.append(self.Lexer().lexpos)
+
+ def FileLineMsg(self, line, msg):
+ # Generate a message containing the file and line number of a token.
+ filename = self.Lexer().filename
+ if filename:
+ return "%s(%d) : %s" % (filename, line + 1, msg)
+ return "<BuiltIn> : %s" % msg
+
+ def SourceLine(self, line, pos):
+ # Create a source line marker
+ caret = ' ' * pos + '^'
+ # We decrement the line number since the array is 0 based while the
+ # line numbers are 1 based.
+ return "%s\n%s" % (self.lines[line - 1], caret)
+
+ def ErrorMessage(self, line, pos, msg):
+ return "\n%s\n%s" % (
+ self.FileLineMsg(line, msg),
+ self.SourceLine(line, pos))
+
+#
+# Tokenizer
+#
+# The token function returns the next token provided by IDLLexer for matching
+# against the leaf patterns.
+#
+ def token(self):
+ tok = self.Lexer().token()
+ if tok:
+ self.last = tok
+ return tok
+
+
+ def GetTokens(self):
+ outlist = []
+ while True:
+ t = self.Lexer().token()
+ if not t:
+ break
+ outlist.append(t)
+ return outlist
+
+ def Tokenize(self, data, filename='__no_file__'):
+ lexer = self.Lexer()
+ lexer.lineno = 1
+ lexer.filename = filename
+ lexer.input(data)
+ self.lines = data.split('\n')
+
+ def KnownTokens(self):
+ return self.tokens
+
+ def Lexer(self):
+ if not self._lexobj:
+ self._lexobj = lex.lex(object=self, lextab=None, optimize=0)
+ return self._lexobj
+
+ def _AddToken(self, token):
+ if token in self.tokens:
+ raise RuntimeError('Same token: ' + token)
+ self.tokens.append(token)
+
+ def _AddTokens(self, tokens):
+ for token in tokens:
+ self._AddToken(token)
+
+ def _AddKeywords(self, keywords):
+ for key in keywords:
+ value = key.upper()
+ self._AddToken(value)
+ self.keywords[key] = value
+
+ def _DelKeywords(self, keywords):
+ for key in keywords:
+ self.tokens.remove(key.upper())
+ del self.keywords[key]
+
+ def __init__(self):
+ self.index = [0]
+ self._lex_errors = 0
+    self.lines = []
+ self.filename = None
+ self.keywords = {}
+ self.tokens = []
+ self._AddTokens(IDLLexer.tokens)
+ self._AddKeywords(IDLLexer.keywords)
+ self._lexobj = None
+ self.last = None
+
+# If run by itself, attempt to build the lexer
+if __name__ == '__main__':
+ lexer_object = IDLLexer()
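+
+  # Illustrative smoke test (an addition for exposition, not part of the
+  # original tool): tokenize a small inline snippet and dump each token's
+  # type and value.
+  lexer_object.Tokenize('interface Foo { attribute long bar; };')
+  for tok in lexer_object.GetTokens():
+    print '%s %s' % (tok.type, tok.value)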
diff --git a/tools/idl_parser/idl_lexer_test.py b/tools/idl_parser/idl_lexer_test.py
new file mode 100755
index 0000000..8b20da8
--- /dev/null
+++ b/tools/idl_parser/idl_lexer_test.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from idl_lexer import IDLLexer
+from idl_ppapi_lexer import IDLPPAPILexer
+
+#
+# FileToTokens
+#
+# From a source file generate a list of tokens.
+#
+def FileToTokens(lexer, filename):
+ with open(filename, 'rb') as srcfile:
+ lexer.Tokenize(srcfile.read(), filename)
+ return lexer.GetTokens()
+
+
+#
+# TextToTokens
+#
+# From a block of text, generate a list of tokens.
+#
+def TextToTokens(lexer, text):
+ lexer.Tokenize(text)
+ return lexer.GetTokens()
+
+
+class WebIDLLexer(unittest.TestCase):
+ def setUp(self):
+ self.lexer = IDLLexer()
+ self.filenames = [
+ 'test_lexer/values.in',
+ 'test_lexer/keywords.in'
+ ]
+
+ #
+ # testRebuildText
+ #
+  # From a set of tokens, generate a new source text by joining the token
+  # values with newlines.  The new source is then tokenized and compared
+  # against the old set.
+ #
+ def testRebuildText(self):
+ for filename in self.filenames:
+ tokens1 = FileToTokens(self.lexer, filename)
+ to_text = '\n'.join(['%s' % t.value for t in tokens1])
+ tokens2 = TextToTokens(self.lexer, to_text)
+
+ count1 = len(tokens1)
+ count2 = len(tokens2)
+ self.assertEqual(count1, count2)
+
+ for i in range(count1):
+ msg = 'Value %s does not match original %s on line %d of %s.' % (
+ tokens2[i].value, tokens1[i].value, tokens1[i].lineno, filename)
+ self.assertEqual(tokens1[i].value, tokens2[i].value, msg)
+
+ #
+ # testExpectedType
+ #
+  # From a set of token pairs, verify that the type field of the second
+  # matches the value of the first, so that:
+  #   integer 123 float 1.1 ...
+  # generates a passing test, where the first token has both the type and
+  # value of the keyword 'integer' and the second has the type 'integer' and
+  # the value 123, and so on.
+ #
+ def testExpectedType(self):
+ for filename in self.filenames:
+ tokens = FileToTokens(self.lexer, filename)
+ count = len(tokens)
+ self.assertTrue(count > 0)
+ self.assertFalse(count & 1)
+
+ index = 0
+ while index < count:
+ expect_type = tokens[index].value
+ actual_type = tokens[index + 1].type
+ msg = 'Type %s does not match expected %s on line %d of %s.' % (
+ actual_type, expect_type, tokens[index].lineno, filename)
+ index += 2
+ self.assertEqual(expect_type, actual_type, msg)
+
+
+class PepperIDLLexer(WebIDLLexer):
+ def setUp(self):
+ self.lexer = IDLPPAPILexer()
+ self.filenames = [
+ 'test_lexer/values_ppapi.in',
+ 'test_lexer/keywords_ppapi.in'
+ ]
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/idl_parser/idl_node.py b/tools/idl_parser/idl_node.py
new file mode 100755
index 0000000..e50fc4e
--- /dev/null
+++ b/tools/idl_parser/idl_node.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+#
+# IDL Node
+#
+# IDL Node defines the IDLAttribute and IDLNode objects which are constructed
+# by the parser as it processes the various 'productions'. The IDLAttribute
+# objects are assigned to the IDLNode's property dictionary instead of being
+# applied as children of the IDLNodes, so they do not exist in the final tree.
+# The AST of IDLNodes is the output of the parsing stage and will be used
+# as the source data by the various generators.
+#
+
+
+#
+# CopyToList
+#
+# Takes an input item, list, or None, and returns a new list of that set.
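+#
+# For example: CopyToList(None) -> [], CopyToList(5) -> [5], and
+# CopyToList([1, 2]) -> a fresh copy of [1, 2] that is safe to modify.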
+def CopyToList(item):
+ # If the item is 'Empty' make it an empty list
+ if not item:
+ item = []
+
+ # If the item is not a list
+ if type(item) is not type([]):
+ item = [item]
+
+ # Make a copy we can modify
+ return list(item)
+
+
+# IDLSearch
+#
+# A base class for traversals of the IDLNode tree. Subclasses override Enter
+# and Exit, which are called for each node visited, while 'depth' tracks the
+# current nesting level (see IDLNode.Traverse and IDLNode.Tree).
+#
+class IDLSearch(object):
+ def __init__(self):
+ self.depth = 0
+
+ def Enter(self, node):
+ pass
+
+ def Exit(self, node):
+ pass
+
+
+# IDLAttribute
+#
+# A temporary object used by the parsing process to hold an Extended Attribute
+# which will be passed as a child to a standard IDLNode.
+#
+class IDLAttribute(object):
+ def __init__(self, name, value):
+ self._cls = 'Property'
+ self.name = name
+ self.value = value
+
+ def __str__(self):
+ return '%s=%s' % (self.name, self.value)
+
+ def GetClass(self):
+ return self._cls
+
+#
+# IDLNode
+#
+# This class implements the AST tree, providing the associations between
+# parents and children. It also maintains a property dictionary to allow
+# for look-ups by name.
+#
+class IDLNode(object):
+ def __init__(self, cls, filename, lineno, pos, children=None):
+ self._cls = cls
+ self._properties = {
+ 'ERRORS' : [],
+ 'WARNINGS': [],
+ 'FILENAME': filename,
+ 'LINENO' : lineno,
+ 'POSSITION' : pos,
+ }
+
+ self._children = []
+ self._parent = None
+ self.AddChildren(children)
+
+#
+#
+#
+ # Return a string representation of this node
+ def __str__(self):
+ name = self.GetProperty('NAME','')
+ return '%s(%s)' % (self._cls, name)
+
+ def GetLogLine(self, msg):
+ filename, lineno = self.GetFileAndLine()
+ return '%s(%d) : %s\n' % (filename, lineno, msg)
+
+ # Log an error for this object
+ def Error(self, msg):
+ self.GetProperty('ERRORS').append(msg)
+ sys.stderr.write(self.GetLogLine('error: ' + msg))
+
+ # Log a warning for this object
+ def Warning(self, msg):
+ self.GetProperty('WARNINGS').append(msg)
+ sys.stdout.write(self.GetLogLine('warning:' + msg))
+
+ # Return file and line number for where node was defined
+ def GetFileAndLine(self):
+ return self.GetProperty('FILENAME'), self.GetProperty('LINENO')
+
+ def GetClass(self):
+ return self._cls
+
+ def GetName(self):
+ return self.GetProperty('NAME')
+
+ def GetParent(self):
+ return self._parent
+
+ def Traverse(self, search, filter_nodes):
+ if self._cls in filter_nodes:
+ return ''
+
+ search.Enter(self)
+ search.depth += 1
+ for child in self._children:
+ child.Traverse(search, filter_nodes)
+ search.depth -= 1
+ search.Exit(self)
+
+
+ def Tree(self, filter_nodes=None, accept_props=None):
+ class DumpTreeSearch(IDLSearch):
+ def __init__(self, props):
+ IDLSearch.__init__(self)
+ self.out = []
+ self.props = props
+
+ def Enter(self, node):
+ tab = ''.rjust(self.depth * 2)
+ self.out.append(tab + str(node))
+ if self.props:
+ proplist = []
+ for key, value in node.GetProperties().iteritems():
+ if key in self.props:
+ proplist.append(tab + ' %s: %s' % (key, str(value)))
+ if proplist:
+ self.out.append(tab + ' PROPERTIES')
+ self.out.extend(proplist)
+
+ if filter_nodes == None:
+ filter_nodes = ['Comment', 'Copyright']
+
+ search = DumpTreeSearch(accept_props)
+ self.Traverse(search, filter_nodes)
+ return search.out
+
+#
+# Search related functions
+#
+ # Check if node is of a given type
+ def IsA(self, *typelist):
+ if self._cls in typelist:
+ return True
+ return False
+
+ # Get a list of all children
+ def GetChildren(self):
+ return self._children
+
+ def GetListOf(self, *keys):
+ out = []
+ for child in self.GetChildren():
+ if child.GetClass() in keys:
+ out.append(child)
+ return out
+
+ def GetOneOf(self, *keys):
+ out = self.GetListOf(*keys)
+ if out:
+ return out[0]
+ return None
+
+ def AddChildren(self, children):
+ children = CopyToList(children)
+ for child in children:
+ if not child:
+ continue
+ if type(child) == IDLAttribute:
+ self.SetProperty(child.name, child.value)
+ continue
+ if type(child) == IDLNode:
+ child._parent = self
+ self._children.append(child)
+ continue
+ raise RuntimeError('Adding child of type %s.\n' % type(child).__name__)
+
+
+#
+# Property Functions
+#
+ def SetProperty(self, name, val):
+ self._properties[name] = val
+
+ def GetProperty(self, name, default=None):
+ return self._properties.get(name, default)
+
+ def GetProperties(self):
+ return self._properties
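+
+
+if __name__ == '__main__':
+  # Illustrative sketch (an addition for exposition; the parser normally
+  # builds these nodes): an IDLAttribute child becomes a property on the
+  # node, while an IDLNode child is linked into the tree.
+  name = IDLAttribute('NAME', 'Foo')
+  child = IDLNode('Attribute', 'example.idl', 2, 0)
+  node = IDLNode('Interface', 'example.idl', 1, 0, [name, child])
+  print node                    # Interface(Foo)
+  print '\n'.join(node.Tree())  # Dumps the node and its Attribute child.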
diff --git a/tools/idl_parser/idl_parser.py b/tools/idl_parser/idl_parser.py
new file mode 100755
index 0000000..a19785b
--- /dev/null
+++ b/tools/idl_parser/idl_parser.py
@@ -0,0 +1,1122 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Parser for PPAPI IDL """
+
+#
+# IDL Parser
+#
+# The parser uses the PLY yacc library to build a set of parsing rules based
+# on WebIDL.
+#
+# WebIDL and the WebIDL grammar can be found at:
+# http://heycam.github.io/webidl/
+# PLY can be found at:
+# http://www.dabeaz.com/ply/
+#
+# The parser generates a tree by recursively matching sets of items against
+# defined patterns. When a match is made, that set of items is reduced
+# to a new item. The new item can provide a match for parent patterns.
+# In this way an AST is built (reduced) depth first.
+#
+
+#
+# Disable check for line length and Member as Function due to how grammar rules
+# are defined with PLY
+#
+# pylint: disable=R0201
+# pylint: disable=C0301
+
+import os.path
+import sys
+import time
+
+from idl_lexer import IDLLexer
+from idl_node import IDLAttribute, IDLNode
+
+#
+# Try to load the ply module, if not, then assume it is in the third_party
+# directory.
+#
+try:
+ # Disable lint check which fails to find the ply module.
+ # pylint: disable=F0401
+ from ply import lex
+ from ply import yacc
+except ImportError:
+ module_path, module_name = os.path.split(__file__)
+  third_party = os.path.join(module_path, os.pardir, os.pardir, 'third_party')
+ sys.path.append(third_party)
+ # pylint: disable=F0401
+ from ply import lex
+ from ply import yacc
+
+#
+# ERROR_REMAP
+#
+# Maps a standard parser error message to a more friendly error message.
+#
+ERROR_REMAP = {
+ 'Unexpected ")" after "(".' : 'Empty argument list.',
+ 'Unexpected ")" after ",".' : 'Missing argument.',
+ 'Unexpected "}" after ",".' : 'Trailing comma in block.',
+ 'Unexpected "}" after "{".' : 'Unexpected empty block.',
+ 'Unexpected comment after "}".' : 'Unexpected trailing comment.',
+ 'Unexpected "{" after keyword "enum".' : 'Enum missing name.',
+ 'Unexpected "{" after keyword "struct".' : 'Struct missing name.',
+ 'Unexpected "{" after keyword "interface".' : 'Interface missing name.',
+}
+
+
+def Boolean(val):
+ """Convert to strict boolean type."""
+ if val:
+ return True
+ return False
+
+
+def ListFromConcat(*items):
+ """Generate list by concatenating inputs"""
+ itemsout = []
+ for item in items:
+ if item is None:
+ continue
+ if type(item) is not type([]):
+ itemsout.append(item)
+ else:
+ itemsout.extend(item)
+
+ return itemsout
+
+def ExpandProduction(p):
+ if type(p) == list:
+ return '[' + ', '.join([ExpandProduction(x) for x in p]) + ']'
+ if type(p) == IDLNode:
+ return 'Node:' + str(p)
+ if type(p) == IDLAttribute:
+ return 'Attr:' + str(p)
+ if type(p) == str:
+ return 'str:' + p
+ return '%s:%s' % (p.__class__.__name__, str(p))
+
+# TokenTypeName
+#
+# Generate a string which has the type and value of the token.
+#
+def TokenTypeName(t):
+ if t.type == 'SYMBOL':
+ return 'symbol %s' % t.value
+ if t.type in ['HEX', 'INT', 'OCT', 'FLOAT']:
+ return 'value %s' % t.value
+ if t.type == 'string' :
+ return 'string "%s"' % t.value
+ if t.type == 'COMMENT' :
+ return 'comment'
+ if t.type == t.value:
+ return '"%s"' % t.value
+ if t.type == ',':
+ return 'Comma'
+ if t.type == 'identifier':
+ return 'identifier "%s"' % t.value
+ return 'keyword "%s"' % t.value
+
+
+#
+# IDL Parser
+#
+# The parser is constructed with a lexer, which provides PLY with the
+# tokenizing definitions. Parsing patterns are encoded as functions where
+# p_<name> is called any time a pattern matching the function documentation
+# is found. Patterns are expressed in the form of:
+# """ <new item> : <item> ....
+# | <item> ...."""
+#
+# Where new item is the result of a match against one or more sets of items
+# separated by the "|".
+#
+# The function is called with an object 'p' where p[0] is the output object
+# and p[n] is the set of inputs for positive values of 'n'. len(p) can be
+# used to distinguish between multiple item sets in the pattern.
+#
+# For more details on parsing refer to the PLY documentation at
+# http://www.dabeaz.com/ply/
+#
+# The parser is based on the WebIDL standard. See:
+# http://heycam.github.io/webidl/#idl-grammar
+#
+# The various productions are annotated so that the whole number in the
+# comment denotes the matching WebIDL grammar definition.
+#
+# Productions with a fractional component in the comment denote additions to
+# the WebIDL spec, such as comments.
+#
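+# As a hypothetical illustration (not an actual rule below), a production
+# function pairs its grammar docstring with code that builds the output item:
+#
+#   def p_Example(self, p):
+#     """Example : CONST identifier ';'"""
+#     p[0] = self.BuildNamed('Example', p, 2)
+#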
+
+
+class IDLParser(object):
+#
+# We force all input files to start with two comments. The first comment is a
+# copyright notice, followed by a file comment and finally by the file-level
+# productions.
+#
+ # [0] Insert a TOP definition for Copyright and Comments
+ def p_Top(self, p):
+ """Top : COMMENT COMMENT Definitions"""
+ Copyright = self.BuildComment('Copyright', p, 1)
+ Filedoc = self.BuildComment('Comment', p, 2)
+ p[0] = ListFromConcat(Copyright, Filedoc, p[3])
+
+ # [0.1] Add support for Multiple COMMENTS
+ def p_Comments(self, p):
+ """Comments : CommentsRest"""
+ if len(p) > 1:
+ p[0] = p[1]
+
+ # [0.2] Produce a COMMENT and aggregate sibling comments
+ def p_CommentsRest(self, p):
+ """CommentsRest : COMMENT CommentsRest
+ | """
+ if len(p) > 1:
+ p[0] = ListFromConcat(self.BuildComment('Comment', p, 1), p[2])
+
+
+#
+#The parser is based on the WebIDL standard. See:
+# http://heycam.github.io/webidl/#idl-grammar
+#
+ # [1]
+ def p_Definitions(self, p):
+ """Definitions : ExtendedAttributeList Definition Definitions
+ | """
+ if len(p) > 1:
+ p[2].AddChildren(p[1])
+ p[0] = ListFromConcat(p[2], p[3])
+
+ # [2]
+ def p_Definition(self, p):
+ """Definition : CallbackOrInterface
+ | Partial
+ | Dictionary
+ | Exception
+ | Enum
+ | Typedef
+ | ImplementsStatement"""
+ p[0] = p[1]
+
+ # [2.1] Error recovery for definition
+ def p_DefinitionError(self, p):
+ """Definition : error ';'"""
+ p[0] = self.BuildError(p, 'Definition')
+
+ # [3]
+ def p_CallbackOrInterface(self, p):
+ """CallbackOrInterface : CALLBACK CallbackRestOrInterface
+ | Interface"""
+ if len(p) > 2:
+ p[0] = p[2]
+ else:
+ p[0] = p[1]
+
+ # [4]
+ def p_CallbackRestOrInterface(self, p):
+ """CallbackRestOrInterface : CallbackRest
+ | Interface"""
+ p[0] = p[1]
+
+ # [5]
+ def p_Interface(self, p):
+ """Interface : INTERFACE identifier Inheritance '{' InterfaceMembers '}' ';'"""
+ p[0] = self.BuildNamed('Interface', p, 2, ListFromConcat(p[3], p[5]))
+
+ # [6]
+ def p_Partial(self, p):
+ """Partial : PARTIAL PartialDefinition"""
+ p[2].AddChildren(self.BuildTrue('Partial'))
+ p[0] = p[2]
+
+ # [6.1] Error recovery for Partial
+ def p_PartialError(self, p):
+ """Partial : PARTIAL error"""
+ p[0] = self.BuildError(p, 'Partial')
+
+ # [7]
+ def p_PartialDefinition(self, p):
+ """PartialDefinition : PartialDictionary
+ | PartialInterface"""
+ p[0] = p[1]
+
+ # [8]
+ def p_PartialInterface(self, p):
+ """PartialInterface : INTERFACE identifier '{' InterfaceMembers '}' ';'"""
+ p[0] = self.BuildNamed('Interface', p, 2, p[4])
+
+ # [9]
+ def p_InterfaceMembers(self, p):
+ """InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers
+ |"""
+ if len(p) > 1:
+ p[2].AddChildren(p[1])
+ p[0] = ListFromConcat(p[2], p[3])
+
+ # [10]
+ def p_InterfaceMember(self, p):
+ """InterfaceMember : Const
+ | AttributeOrOperationOrIterator"""
+ p[0] = p[1]
+
+ # [11]
+ def p_Dictionary(self, p):
+ """Dictionary : DICTIONARY identifier Inheritance '{' DictionaryMembers '}' ';'"""
+ p[0] = self.BuildNamed('Dictionary', p, 2, ListFromConcat(p[3], p[5]))
+
+ # [11.1] Error recovery for regular Dictionary
+ def p_DictionaryError(self, p):
+ """Dictionary : DICTIONARY error ';'"""
+ p[0] = self.BuildError(p, 'Dictionary')
+
+ # [12]
+ def p_DictionaryMembers(self, p):
+ """DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers
+ |"""
+ if len(p) > 1:
+ p[2].AddChildren(p[1])
+ p[0] = ListFromConcat(p[2], p[3])
+
+ # [13]
+ def p_DictionaryMember(self, p):
+ """DictionaryMember : Type identifier Default ';'"""
+ p[0] = self.BuildNamed('Key', p, 2, ListFromConcat(p[1], p[3]))
+
+ # [14]
+ def p_PartialDictionary(self, p):
+ """PartialDictionary : DICTIONARY identifier '{' DictionaryMembers '}' ';'"""
+ partial = self.BuildTrue('Partial')
+ p[0] = self.BuildNamed('Dictionary', p, 2, ListFromConcat(p[4], partial))
+
+ # [14.1] Error recovery for Partial Dictionary
+ def p_PartialDictionaryError(self, p):
+ """PartialDictionary : DICTIONARY error ';'"""
+ p[0] = self.BuildError(p, 'PartialDictionary')
+
+ # [15]
+ def p_Default(self, p):
+ """Default : '=' DefaultValue
+ |"""
+ if len(p) > 1:
+ p[0] = self.BuildProduction('Default', p, 2, p[2])
+
+ # [16]
+ def p_DefaultValue(self, p):
+ """DefaultValue : ConstValue
+ | string"""
+ if type(p[1]) == str:
+ p[0] = ListFromConcat(self.BuildAttribute('TYPE', 'DOMString'),
+ self.BuildAttribute('NAME', p[1]))
+ else:
+ p[0] = p[1]
+
+ # [17]
+ def p_Exception(self, p):
+ """Exception : EXCEPTION identifier Inheritance '{' ExceptionMembers '}' ';'"""
+ p[0] = self.BuildNamed('Exception', p, 2, ListFromConcat(p[3], p[5]))
+
+ # [18]
+ def p_ExceptionMembers(self, p):
+ """ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers
+ |"""
+ if len(p) > 1:
+ p[2].AddChildren(p[1])
+ p[0] = ListFromConcat(p[2], p[3])
+
+ # [18.1] Error recovery for ExceptionMembers
+ def p_ExceptionMembersError(self, p):
+ """ExceptionMembers : error"""
+ p[0] = self.BuildError(p, 'ExceptionMembers')
+
+ # [19]
+ def p_Inheritance(self, p):
+ """Inheritance : ':' identifier
+ |"""
+ if len(p) > 1:
+ p[0] = self.BuildNamed('Inherit', p, 2)
+
+ # [20]
+ def p_Enum(self, p):
+ """Enum : ENUM identifier '{' EnumValueList '}' ';'"""
+ p[0] = self.BuildNamed('Enum', p, 2, p[4])
+
+ # [20.1] Error recovery for Enums
+ def p_EnumError(self, p):
+ """Enum : ENUM error ';'"""
+ p[0] = self.BuildError(p, 'Enum')
+
+ # [21]
+ def p_EnumValueList(self, p):
+ """EnumValueList : ExtendedAttributeList string EnumValueListComma"""
+ enum = self.BuildNamed('EnumItem', p, 2, p[1])
+ p[0] = ListFromConcat(enum, p[3])
+
+ # [22]
+ def p_EnumValueListComma(self, p):
+ """EnumValueListComma : ',' EnumValueListString
+ |"""
+ if len(p) > 1:
+ p[0] = p[2]
+
+ # [23]
+ def p_EnumValueListString(self, p):
+ """EnumValueListString : ExtendedAttributeList string EnumValueListComma
+ |"""
+ if len(p) > 1:
+ enum = self.BuildNamed('EnumItem', p, 2, p[1])
+ p[0] = ListFromConcat(enum, p[3])
+
+ # [24]
+ def p_CallbackRest(self, p):
+ """CallbackRest : identifier '=' ReturnType '(' ArgumentList ')' ';'"""
+ arguments = self.BuildProduction('Arguments', p, 4, p[5])
+ p[0] = self.BuildNamed('Callback', p, 1, ListFromConcat(p[3], arguments))
+
+ # [25]
+ def p_Typedef(self, p):
+ """Typedef : TYPEDEF ExtendedAttributeListNoComments Type identifier ';'"""
+ p[0] = self.BuildNamed('Typedef', p, 4, ListFromConcat(p[2], p[3]))
+
+ # [25.1] Error recovery for Typedefs
+ def p_TypedefError(self, p):
+ """Typedef : TYPEDEF error ';'"""
+ p[0] = self.BuildError(p, 'Typedef')
+
+ # [26]
+ def p_ImplementsStatement(self, p):
+ """ImplementsStatement : identifier IMPLEMENTS identifier ';'"""
+ name = self.BuildAttribute('REFERENCE', p[3])
+ p[0] = self.BuildNamed('Implements', p, 1, name)
+
+ # [27]
+ def p_Const(self, p):
+ """Const : CONST ConstType identifier '=' ConstValue ';'"""
+ value = self.BuildProduction('Value', p, 5, p[5])
+ p[0] = self.BuildNamed('Const', p, 3, ListFromConcat(p[2], value))
+
+ # [28]
+ def p_ConstValue(self, p):
+ """ConstValue : BooleanLiteral
+ | FloatLiteral
+ | integer
+ | null"""
+ if type(p[1]) == str:
+ p[0] = ListFromConcat(self.BuildAttribute('TYPE', 'integer'),
+ self.BuildAttribute('NAME', p[1]))
+ else:
+ p[0] = p[1]
+
+ # [28.1] Add definition for NULL
+ def p_null(self, p):
+ """null : NULL"""
+ p[0] = ListFromConcat(self.BuildAttribute('TYPE', 'NULL'),
+ self.BuildAttribute('NAME', 'NULL'))
+
+ # [29]
+ def p_BooleanLiteral(self, p):
+ """BooleanLiteral : TRUE
+ | FALSE"""
+ value = self.BuildAttribute('VALUE', Boolean(p[1] == 'true'))
+ p[0] = ListFromConcat(self.BuildAttribute('TYPE', 'boolean'), value)
+
+ # [30]
+ def p_FloatLiteral(self, p):
+ """FloatLiteral : float
+ | '-' INFINITY
+ | INFINITY
+ | NAN """
+ if len(p) > 2:
+ val = '-Infinity'
+ else:
+ val = p[1]
+ p[0] = ListFromConcat(self.BuildAttribute('TYPE', 'float'),
+ self.BuildAttribute('VALUE', val))
+
+ # [31] Removed unsupported: Serializer
+ def p_AttributeOrOperationOrIterator(self, p):
+ """AttributeOrOperationOrIterator : Stringifier
+ | StaticMember
+ | Attribute
+ | OperationOrIterator"""
+ p[0] = p[1]
+
+ # [32-37] NOT IMPLEMENTED (Serializer)
+
+ # [38]
+ def p_Stringifier(self, p):
+ """Stringifier : STRINGIFIER StringifierRest"""
+ p[0] = self.BuildProduction('Stringifier', p, 1, p[2])
+
+ # [39]
+ def p_StringifierRest(self, p):
+ """StringifierRest : AttributeRest
+ | ReturnType OperationRest
+ | ';'"""
+ if len(p) == 3:
+ p[2].AddChildren(p[1])
+ p[0] = p[2]
+ elif p[1] != ';':
+ p[0] = p[1]
+
+ # [40]
+ def p_StaticMember(self, p):
+ """StaticMember : STATIC StaticMemberRest"""
+ p[2].AddChildren(self.BuildTrue('STATIC'))
+ p[0] = p[2]
+
+ # [41]
+ def p_StaticMemberRest(self, p):
+ """StaticMemberRest : AttributeRest
+ | ReturnType OperationRest"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[2].AddChildren(p[1])
+ p[0] = p[2]
+
+ # [42]
+ def p_Attribute(self, p):
+ """Attribute : Inherit AttributeRest"""
+ p[2].AddChildren(ListFromConcat(p[1]))
+ p[0] = p[2]
+
+ # [43]
+ def p_AttributeRest(self, p):
+ """AttributeRest : ReadOnly ATTRIBUTE Type identifier ';'"""
+ p[0] = self.BuildNamed('Attribute', p, 4,
+ ListFromConcat(p[1], p[3]))
+
+ # [44]
+ def p_Inherit(self, p):
+ """Inherit : INHERIT
+ |"""
+ if len(p) > 1:
+ p[0] = self.BuildTrue('INHERIT')
+
+ # [45]
+ def p_ReadOnly(self, p):
+ """ReadOnly : READONLY
+ |"""
+ if len(p) > 1:
+ p[0] = self.BuildTrue('READONLY')
+
+ # [46]
+ def p_OperationOrIterator(self, p):
+ """OperationOrIterator : ReturnType OperationOrIteratorRest
+ | SpecialOperation"""
+ if len(p) == 3:
+ p[2].AddChildren(p[1])
+ p[0] = p[2]
+ else:
+ p[0] = p[1]
+
+ # [47]
+ def p_SpecialOperation(self, p):
+ """SpecialOperation : Special Specials ReturnType OperationRest"""
+ p[4].AddChildren(ListFromConcat(p[1], p[2], p[3]))
+ p[0] = p[4]
+
+ # [48]
+ def p_Specials(self, p):
+ """Specials : Special Specials
+ | """
+ if len(p) > 1:
+ p[0] = ListFromConcat(p[1], p[2])
+
+ # [49]
+ def p_Special(self, p):
+ """Special : GETTER
+ | SETTER
+ | CREATOR
+ | DELETER
+ | LEGACYCALLER"""
+ p[0] = self.BuildTrue(p[1].upper())
+
+ # [50] Removed unsupported: IteratorRest
+ def p_OperationOrIteratorRest(self, p):
+ """OperationOrIteratorRest : OperationRest"""
+ p[0] = p[1]
+
+ # [51-53] NOT IMPLEMENTED (IteratorRest)
+
+ # [54]
+ def p_OperationRest(self, p):
+ """OperationRest : OptionalIdentifier '(' ArgumentList ')' ';'"""
+ arguments = self.BuildProduction('Arguments', p, 2, p[3])
+ p[0] = self.BuildNamed('Operation', p, 1, arguments)
+
+ # [55]
+ def p_OptionalIdentifier(self, p):
+ """OptionalIdentifier : identifier
+ |"""
+ if len(p) > 1:
+ p[0] = p[1]
+ else:
+ p[0] = '_unnamed_'
+
+ # [56]
+ def p_ArgumentList(self, p):
+ """ArgumentList : Argument Arguments
+ |"""
+ if len(p) > 1:
+ p[0] = ListFromConcat(p[1], p[2])
+
+ # [56.1] ArgumentList error recovery
+ def p_ArgumentListError(self, p):
+ """ArgumentList : error """
+ p[0] = self.BuildError(p, 'ArgumentList')
+
+ # [57]
+ def p_Arguments(self, p):
+ """Arguments : ',' Argument Arguments
+ |"""
+ if len(p) > 1:
+ p[0] = ListFromConcat(p[2], p[3])
+
+ # [58]
+ def p_Argument(self, p):
+ """Argument : ExtendedAttributeList OptionalOrRequiredArgument"""
+ p[2].AddChildren(p[1])
+ p[0] = p[2]
+
+ # [59]
+ def p_OptionalOrRequiredArgument(self, p):
+ """OptionalOrRequiredArgument : OPTIONAL Type ArgumentName Default
+ | Type Ellipsis ArgumentName"""
+ if len(p) > 4:
+ arg = self.BuildNamed('Argument', p, 3, ListFromConcat(p[2], p[4]))
+ arg.AddChildren(self.BuildTrue('OPTIONAL'))
+ else:
+ arg = self.BuildNamed('Argument', p, 3, ListFromConcat(p[1], p[2]))
+ p[0] = arg
+
+ # [60]
+ def p_ArgumentName(self, p):
+ """ArgumentName : ArgumentNameKeyword
+ | identifier"""
+ p[0] = p[1]
+
+ # [61]
+ def p_Ellipsis(self, p):
+ """Ellipsis : ELLIPSIS
+ |"""
+ if len(p) > 1:
+ p[0] = self.BuildNamed('Argument', p, 1)
+ p[0].AddChildren(self.BuildTrue('ELLIPSIS'))
+
+ # [62]
+ def p_ExceptionMember(self, p):
+ """ExceptionMember : Const
+ | ExceptionField"""
+ p[0] = p[1]
+
+ # [63]
+ def p_ExceptionField(self, p):
+ """ExceptionField : Type identifier ';'"""
+ p[0] = self.BuildNamed('ExceptionField', p, 2, p[1])
+
+ # [63.1] Error recovery for ExceptionMembers
+ def p_ExceptionFieldError(self, p):
+ """ExceptionField : error"""
+ p[0] = self.BuildError(p, 'ExceptionField')
+
+ # [64] No comment version for mid statement attributes.
+ def p_ExtendedAttributeListNoComments(self, p):
+ """ExtendedAttributeListNoComments : '[' ExtendedAttribute ExtendedAttributes ']'
+ | """
+ if len(p) > 2:
+ items = ListFromConcat(p[2], p[3])
+ p[0] = self.BuildProduction('ExtAttributes', p, 1, items)
+
+ # [64.1] Add optional comment field for start of statements.
+ def p_ExtendedAttributeList(self, p):
+ """ExtendedAttributeList : Comments '[' ExtendedAttribute ExtendedAttributes ']'
+ | Comments """
+ if len(p) > 2:
+ items = ListFromConcat(p[3], p[4])
+ attribs = self.BuildProduction('ExtAttributes', p, 2, items)
+ p[0] = ListFromConcat(p[1], attribs)
+ else:
+ p[0] = p[1]
+
+ # [65]
+ def p_ExtendedAttributes(self, p):
+ """ExtendedAttributes : ',' ExtendedAttribute ExtendedAttributes
+ |"""
+ if len(p) > 1:
+ p[0] = ListFromConcat(p[2], p[3])
+
+ # We only support:
+ # [ identifier ]
+ # [ identifier ( ArgumentList ) ]
+ # [ identifier = identifier ]
+ # [ identifier = ( IdentifierList ) ]
+ # [ identifier = identifier ( ArgumentList ) ]
+ # [66] map directly to [91-93, 95]
+ # [67-69, 71] are unsupported
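+  #
+  # For illustration, hypothetical IDL matching each supported form:
+  #   [Replaceable]                          -> ExtendedAttributeNoArgs
+  #   [Constructor(double x)]                -> ExtendedAttributeArgList
+  #   [PutForwards=name]                     -> ExtendedAttributeIdent
+  #   [Exposed=(Window,Worker)]              -> ExtendedAttributeIdentList
+  #   [NamedConstructor=Audio(DOMString s)]  -> ExtendedAttributeNamedArgList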
+ def p_ExtendedAttribute(self, p):
+ """ExtendedAttribute : ExtendedAttributeNoArgs
+ | ExtendedAttributeArgList
+ | ExtendedAttributeIdent
+ | ExtendedAttributeIdentList
+ | ExtendedAttributeNamedArgList"""
+ p[0] = p[1]
+
+ # [70]
+ def p_ArgumentNameKeyword(self, p):
+ """ArgumentNameKeyword : ATTRIBUTE
+ | CALLBACK
+ | CONST
+ | CREATOR
+ | DELETER
+ | DICTIONARY
+ | ENUM
+ | EXCEPTION
+ | GETTER
+ | IMPLEMENTS
+ | INHERIT
+ | LEGACYCALLER
+ | PARTIAL
+ | SERIALIZER
+ | SETTER
+ | STATIC
+ | STRINGIFIER
+ | TYPEDEF
+ | UNRESTRICTED"""
+ p[0] = p[1]
+
+ # [72]
+ def p_Type(self, p):
+ """Type : SingleType
+ | UnionType TypeSuffix"""
+ if len(p) == 2:
+ p[0] = self.BuildProduction('Type', p, 1, p[1])
+ else:
+ p[0] = self.BuildProduction('Type', p, 1, ListFromConcat(p[1], p[2]))
+
+ # [73]
+ def p_SingleType(self, p):
+ """SingleType : NonAnyType
+ | ANY TypeSuffixStartingWithArray"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = ListFromConcat(self.BuildProduction('Any', p, 1), p[2])
+
+ # [74]
+ def p_UnionType(self, p):
+ """UnionType : '(' UnionMemberType OR UnionMemberType UnionMemberTypes ')'"""
+
+ # [75]
+ def p_UnionMemberType(self, p):
+ """UnionMemberType : NonAnyType
+ | UnionType TypeSuffix
+ | ANY '[' ']' TypeSuffix"""
+ # [76]
+ def p_UnionMemberTypes(self, p):
+ """UnionMemberTypes : OR UnionMemberType UnionMemberTypes
+ |"""
+
+ # [77] Moved BYTESTRING, DOMSTRING, OBJECT, DATE, REGEXP to PrimitiveType
+ # Moving all built-in types into PrimitiveType makes it easier to
+ # differentiate between them and 'identifier', since p[1] would be a string in
+ # both cases.
+ def p_NonAnyType(self, p):
+ """NonAnyType : PrimitiveType TypeSuffix
+ | identifier TypeSuffix
+ | SEQUENCE '<' Type '>' Null"""
+ if len(p) == 3:
+ if type(p[1]) == str:
+ typeref = self.BuildNamed('Typeref', p, 1)
+ else:
+ typeref = p[1]
+ p[0] = ListFromConcat(typeref, p[2])
+
+ if len(p) == 6:
+ p[0] = self.BuildProduction('Sequence', p, 1, ListFromConcat(p[3], p[5]))
+
+
+ # [78]
+ def p_ConstType(self, p):
+ """ConstType : PrimitiveType Null
+ | identifier Null"""
+ if type(p[1]) == str:
+ p[0] = self.BuildNamed('Typeref', p, 1, p[2])
+ else:
+ p[1].AddChildren(p[2])
+ p[0] = p[1]
+
+
+ # [79] Added BYTESTRING, DOMSTRING, OBJECT, DATE, REGEXP
+ def p_PrimitiveType(self, p):
+ """PrimitiveType : UnsignedIntegerType
+ | UnrestrictedFloatType
+ | BOOLEAN
+ | BYTE
+ | OCTET
+ | BYTESTRING
+ | DOMSTRING
+ | OBJECT
+ | DATE
+ | REGEXP"""
+ if type(p[1]) == str:
+ p[0] = self.BuildNamed('PrimitiveType', p, 1)
+ else:
+ p[0] = p[1]
+
+
+ # [80]
+ def p_UnrestrictedFloatType(self, p):
+ """UnrestrictedFloatType : UNRESTRICTED FloatType
+ | FloatType"""
+ if len(p) == 2:
+ typeref = self.BuildNamed('PrimitiveType', p, 1)
+ else:
+ typeref = self.BuildNamed('PrimitiveType', p, 2)
+ typeref.AddChildren(self.BuildTrue('UNRESTRICTED'))
+ p[0] = typeref
+
+
+ # [81]
+ def p_FloatType(self, p):
+ """FloatType : FLOAT
+ | DOUBLE"""
+ p[0] = p[1]
+
+ # [82]
+ def p_UnsignedIntegerType(self, p):
+ """UnsignedIntegerType : UNSIGNED IntegerType
+ | IntegerType"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = 'unsigned ' + p[2]
+
+ # [83]
+ def p_IntegerType(self, p):
+ """IntegerType : SHORT
+ | LONG OptionalLong"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = p[1] + p[2]
+
+ # [84]
+ def p_OptionalLong(self, p):
+ """OptionalLong : LONG
+ | """
+ if len(p) > 1:
+ p[0] = ' ' + p[1]
+ else:
+ p[0] = ''
+
+
+ # [85] Add support for sized array
+ def p_TypeSuffix(self, p):
+ """TypeSuffix : '[' integer ']' TypeSuffix
+ | '[' ']' TypeSuffix
+ | '?' TypeSuffixStartingWithArray
+ | """
+ if len(p) == 5:
+ p[0] = self.BuildNamed('Array', p, 2, p[4])
+
+ if len(p) == 4:
+ p[0] = self.BuildProduction('Array', p, 1, p[3])
+
+ if len(p) == 3:
+ p[0] = ListFromConcat(self.BuildTrue('NULLABLE'), p[2])
+
+
+ # [86]
+ def p_TypeSuffixStartingWithArray(self, p):
+ """TypeSuffixStartingWithArray : '[' ']' TypeSuffix
+ | """
+ if len(p) > 1:
+ p[0] = self.BuildProduction('Array', p, 0, p[3])
+
+ # [87]
+ def p_Null(self, p):
+ """Null : '?'
+ |"""
+ if len(p) > 1:
+ p[0] = self.BuildTrue('NULLABLE')
+
+ # [88]
+ def p_ReturnType(self, p):
+ """ReturnType : Type
+ | VOID"""
+ if p[1] == 'void':
+ p[0] = self.BuildProduction('Type', p, 1)
+ p[0].AddChildren(self.BuildNamed('PrimitiveType', p, 1))
+ else:
+ p[0] = p[1]
+
+ # [89]
+ def p_IdentifierList(self, p):
+ """IdentifierList : identifier Identifiers"""
+ p[0] = ListFromConcat(p[1], p[2])
+
+ # [90]
+ def p_Identifiers(self, p):
+ """Identifiers : ',' identifier Identifiers
+ |"""
+ if len(p) > 1:
+ p[0] = ListFromConcat(p[2], p[3])
+
+ # [91]
+ def p_ExtendedAttributeNoArgs(self, p):
+ """ExtendedAttributeNoArgs : identifier"""
+ p[0] = self.BuildNamed('ExtAttribute', p, 1)
+
+ # [92]
+ def p_ExtendedAttributeArgList(self, p):
+ """ExtendedAttributeArgList : identifier '(' ArgumentList ')'"""
+ arguments = self.BuildProduction('Arguments', p, 2, p[3])
+ p[0] = self.BuildNamed('ExtAttribute', p, 1, arguments)
+
+ # [93]
+ def p_ExtendedAttributeIdent(self, p):
+ """ExtendedAttributeIdent : identifier '=' identifier"""
+ value = self.BuildAttribute('VALUE', p[3])
+ p[0] = self.BuildNamed('ExtAttribute', p, 1, value)
+
+ # [94]
+ def p_ExtendedAttributeIdentList(self, p):
+ """ExtendedAttributeIdentList : identifier '=' '(' IdentifierList ')'"""
+ value = self.BuildAttribute('VALUE', p[4])
+ p[0] = self.BuildNamed('ExtAttribute', p, 1, value)
+
+ # [95]
+ def p_ExtendedAttributeNamedArgList(self, p):
+ """ExtendedAttributeNamedArgList : identifier '=' identifier '(' ArgumentList ')'"""
+ args = self.BuildProduction('Arguments', p, 4, p[5])
+ value = self.BuildNamed('Call', p, 3, args)
+ p[0] = self.BuildNamed('ExtAttribute', p, 1, value)
+
+ # [96] NOT IMPLEMENTED (ExtendedAttributeTypePair)
+
+#
+# Parser Errors
+#
+# p_error is called whenever the parser can not find a pattern match for
+# a set of items from the current state. The p_error function defined here
+# records the error, and parsing recovery then happens as the
+# p_<type>_error functions defined above are called. This allows the parser
+# to continue so as to capture more than one error per file.
+#
+ def p_error(self, t):
+ if t:
+ lineno = t.lineno
+ pos = t.lexpos
+ prev = self.yaccobj.symstack[-1]
+ if type(prev) == lex.LexToken:
+ msg = "Unexpected %s after %s." % (
+ TokenTypeName(t), TokenTypeName(prev))
+ else:
+ msg = "Unexpected %s." % (t.value)
+ else:
+ last = self.LastToken()
+ lineno = last.lineno
+ pos = last.lexpos
+ msg = "Unexpected end of file after %s." % TokenTypeName(last)
+ self.yaccobj.restart()
+
+ # Attempt to remap the error to a friendlier form
+ if msg in ERROR_REMAP:
+ msg = ERROR_REMAP[msg]
+
+ self._last_error_msg = msg
+ self._last_error_lineno = lineno
+ self._last_error_pos = pos
+
+ def Warn(self, node, msg):
+ sys.stdout.write(node.GetLogLine(msg))
+    self._parse_warnings += 1
+
+ def LastToken(self):
+ return self.lexer.last
+
+ def __init__(self, lexer, verbose=False, debug=False, mute_error=False):
+ self.lexer = lexer
+ self.tokens = lexer.KnownTokens()
+ self.yaccobj = yacc.yacc(module=self, tabmodule=None, debug=debug,
+ optimize=0, write_tables=0)
+ self.parse_debug = debug
+ self.verbose = verbose
+ self.mute_error = mute_error
+ self._parse_errors = 0
+ self._parse_warnings = 0
+ self._last_error_msg = None
+ self._last_error_lineno = 0
+ self._last_error_pos = 0
+
+
+#
+# BuildProduction
+#
+# Production is the set of items sent to a grammar rule resulting in a new
+# item being returned.
+#
+# p - The yacc production object containing the stack of items.
+# index - Index into the production of the name for the item being produced.
+# cls - The type of item being produced.
+# childlist - The children of the new item
+ def BuildProduction(self, cls, p, index, childlist=None):
+ try:
+ if not childlist:
+ childlist = []
+
+ filename = self.lexer.Lexer().filename
+ lineno = p.lineno(index)
+ pos = p.lexpos(index)
+ out = IDLNode(cls, filename, lineno, pos, childlist)
+ return out
+ except:
+ print 'Exception while parsing:'
+ for num, item in enumerate(p):
+ print ' [%d] %s' % (num, ExpandProduction(item))
+ if self.LastToken():
+ print 'Last token: %s' % str(self.LastToken())
+ raise
+
+ def BuildNamed(self, cls, p, index, childlist=None):
+ childlist = ListFromConcat(childlist)
+ childlist.append(self.BuildAttribute('NAME', p[index]))
+ return self.BuildProduction(cls, p, index, childlist)
+
+ def BuildComment(self, cls, p, index):
+ name = p[index]
+
+ # Remove comment markers
+ lines = []
+ if name[:2] == '//':
+ # For C++ style, remove any leading whitespace and the '//' marker from
+ # each line.
+ form = 'cc'
+ for line in name.split('\n'):
+ start = line.find('//')
+ lines.append(line[start+2:])
+ else:
+      # For C style, remove the trailing '*/'.
+ form = 'c'
+ for line in name[:-2].split('\n'):
+        # Remove characters up to and including this line's '*' start marker,
+        # if found; otherwise the line should be blank.
+ offs = line.find('*')
+ if offs >= 0:
+ line = line[offs + 1:].rstrip()
+ else:
+ line = ''
+ lines.append(line)
+ name = '\n'.join(lines)
+ childlist = [self.BuildAttribute('NAME', name),
+ self.BuildAttribute('FORM', form)]
+ return self.BuildProduction(cls, p, index, childlist)
+
+#
+# BuildError
+#
+# Build an Error node as part of the recovery process.
+#
+#
+ def BuildError(self, p, prod):
+ self._parse_errors += 1
+ name = self.BuildAttribute('NAME', self._last_error_msg)
+ line = self.BuildAttribute('LINE', self._last_error_lineno)
+ pos = self.BuildAttribute('POS', self._last_error_pos)
+ prod = self.BuildAttribute('PROD', prod)
+
+ node = self.BuildProduction('Error', p, 1,
+ ListFromConcat(name, line, pos, prod))
+ if not self.mute_error:
+ node.Error(self._last_error_msg)
+
+ return node
+
+#
+# BuildAttribute
+#
+# An ExtendedAttribute is a special production that results in a property
+# which is applied to the adjacent item. Attributes have no children and
+# instead represent key/value pairs.
+#
+ def BuildAttribute(self, key, val):
+ return IDLAttribute(key, val)
+
+ def BuildFalse(self, key):
+ return IDLAttribute(key, Boolean(False))
+
+ def BuildTrue(self, key):
+ return IDLAttribute(key, Boolean(True))
+
+ def GetErrors(self):
+ # Access lexer errors, despite being private
+ # pylint: disable=W0212
+ return self._parse_errors + self.lexer._lex_errors
+
+#
+# ParseText
+#
+# Attempts to parse the given data after loading it into the lexer.
+#
+ def ParseText(self, filename, data):
+ self._parse_errors = 0
+ self._parse_warnings = 0
+ self._last_error_msg = None
+ self._last_error_lineno = 0
+ self._last_error_pos = 0
+
+ try:
+ self.lexer.Tokenize(data, filename)
+ nodes = self.yaccobj.parse(lexer=self.lexer) or []
+ name = self.BuildAttribute('NAME', filename)
+ return IDLNode('File', filename, 0, 0, nodes + [name])
+
+ except lex.LexError as lexError:
+ sys.stderr.write('Error in token: %s\n' % str(lexError))
+ return None
+
+
+
+def ParseFile(parser, filename):
+ """Parse a file and return a File type of node."""
+ with open(filename) as fileobject:
+ try:
+ out = parser.ParseText(filename, fileobject.read())
+ out.SetProperty('DATETIME', time.ctime(os.path.getmtime(filename)))
+ out.SetProperty('ERRORS', parser.GetErrors())
+ return out
+
+ except Exception as e:
+ last = parser.LastToken()
+ sys.stderr.write('%s(%d) : Internal parsing error\n\t%s.\n' % (
+ filename, last.lineno, str(e)))
+
+
+def main(argv):
+ nodes = []
+ parser = IDLParser(IDLLexer())
+ errors = 0
+ for filename in argv:
+ filenode = ParseFile(parser, filename)
+ if (filenode):
+ errors += filenode.GetProperty('ERRORS')
+ nodes.append(filenode)
+
+ ast = IDLNode('AST', '__AST__', 0, 0, nodes)
+
+ print '\n'.join(ast.Tree(accept_props=['PROD']))
+ if errors:
+ print '\nFound %d errors.\n' % errors
+
+ return errors
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
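+
+# A minimal programmatic usage sketch (illustrative only; 'example.idl' is a
+# hypothetical file name):
+#
+#   parser = IDLParser(IDLLexer())
+#   filenode = ParseFile(parser, 'example.idl')
+#   if filenode:
+#     print '\n'.join(filenode.Tree())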
diff --git a/tools/idl_parser/idl_parser_test.py b/tools/idl_parser/idl_parser_test.py
new file mode 100755
index 0000000..76a9571
--- /dev/null
+++ b/tools/idl_parser/idl_parser_test.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import unittest
+
+from idl_lexer import IDLLexer
+from idl_parser import IDLParser, ParseFile
+from idl_ppapi_lexer import IDLPPAPILexer
+from idl_ppapi_parser import IDLPPAPIParser
+
+def ParseCommentTest(comment):
+ comment = comment.strip()
+ comments = comment.split(None, 1)
+ return comments[0], comments[1]
+
+
+class WebIDLParser(unittest.TestCase):
+ def setUp(self):
+ self.parser = IDLParser(IDLLexer(), mute_error=True)
+ self.filenames = glob.glob('test_parser/*_web.idl')
+
+ def _TestNode(self, node):
+ comments = node.GetListOf('Comment')
+ for comment in comments:
+ check, value = ParseCommentTest(comment.GetName())
+ if check == 'BUILD':
+ msg = 'Expecting %s, but found %s.\n' % (value, str(node))
+ self.assertEqual(value, str(node), msg)
+
+ if check == 'ERROR':
+ msg = node.GetLogLine('Expecting\n\t%s\nbut found \n\t%s\n' % (
+ value, str(node)))
+ self.assertEqual(value, node.GetName(), msg)
+
+ if check == 'PROP':
+ key, expect = value.split('=')
+ actual = str(node.GetProperty(key))
+ msg = 'Mismatched property %s: %s vs %s.\n' % (key, expect, actual)
+ self.assertEqual(expect, actual, msg)
+
+ if check == 'TREE':
+ quick = '\n'.join(node.Tree())
+ lineno = node.GetProperty('LINENO')
+ msg = 'Mismatched tree at line %d:\n%sVS\n%s' % (lineno, value, quick)
+ self.assertEqual(value, quick, msg)
+
+ def testExpectedNodes(self):
+ for filename in self.filenames:
+ filenode = ParseFile(self.parser, filename)
+ children = filenode.GetChildren()
+ self.assertTrue(len(children) > 2, 'Expecting children in %s.' %
+ filename)
+
+ for node in filenode.GetChildren()[2:]:
+ self._TestNode(node)
+
+
+class PepperIDLParser(unittest.TestCase):
+ def setUp(self):
+ self.parser = IDLPPAPIParser(IDLPPAPILexer(), mute_error=True)
+ self.filenames = glob.glob('test_parser/*_ppapi.idl')
+
+ def _TestNode(self, filename, node):
+ comments = node.GetListOf('Comment')
+ for comment in comments:
+ check, value = ParseCommentTest(comment.GetName())
+ if check == 'BUILD':
+ msg = '%s - Expecting %s, but found %s.\n' % (
+ filename, value, str(node))
+ self.assertEqual(value, str(node), msg)
+
+ if check == 'ERROR':
+ msg = node.GetLogLine('%s - Expecting\n\t%s\nbut found \n\t%s\n' % (
+ filename, value, str(node)))
+ self.assertEqual(value, node.GetName(), msg)
+
+ if check == 'PROP':
+ key, expect = value.split('=')
+ actual = str(node.GetProperty(key))
+ msg = '%s - Mismatched property %s: %s vs %s.\n' % (
+ filename, key, expect, actual)
+ self.assertEqual(expect, actual, msg)
+
+ if check == 'TREE':
+ quick = '\n'.join(node.Tree())
+ lineno = node.GetProperty('LINENO')
+ msg = '%s - Mismatched tree at line %d:\n%sVS\n%s' % (
+ filename, lineno, value, quick)
+ self.assertEqual(value, quick, msg)
+
+ def testExpectedNodes(self):
+ for filename in self.filenames:
+ filenode = ParseFile(self.parser, filename)
+ children = filenode.GetChildren()
+ self.assertTrue(len(children) > 2, 'Expecting children in %s.' %
+ filename)
+
+ for node in filenode.GetChildren()[2:]:
+ self._TestNode(filename, node)
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
+
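Editor's note: the BUILD/ERROR/PROP/TREE directives these tests check are ordinary IDL comments whose first word names the check and whose remainder is the expected value (see ParseCommentTest above). A sketch, assuming it is run from tools/idl_parser, that prints each file-level node of one checked-in test file next to its directives, the same data _TestNode asserts against:

# Illustrative sketch only, not part of the patch.
from idl_lexer import IDLLexer
from idl_parser import IDLParser, ParseFile

parser = IDLParser(IDLLexer(), mute_error=True)
filenode = ParseFile(parser, 'test_parser/enum_web.idl')
# Skip the copyright and file comments, as the tests above do.
for node in filenode.GetChildren()[2:]:
  for comment in node.GetListOf('Comment'):
    check, value = comment.GetName().strip().split(None, 1)
    first_line = value.split('\n')[0]
    print('%-5s %-40s -> %s' % (check, first_line, str(node)))
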
diff --git a/tools/idl_parser/idl_ppapi_lexer.py b/tools/idl_parser/idl_ppapi_lexer.py
new file mode 100755
index 0000000..3d5d39e
--- /dev/null
+++ b/tools/idl_parser/idl_ppapi_lexer.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Lexer for PPAPI IDL
+
+The lexer uses the PLY library to build a tokenizer which understands both
+WebIDL and Pepper tokens.
+
+The WebIDL specification, including its regular expressions, can be found at:
+ http://www.w3.org/TR/2012/CR-WebIDL-20120419/
+PLY can be found at:
+ http://www.dabeaz.com/ply/
+"""
+
+from idl_lexer import IDLLexer
+
+
+#
+# IDL PPAPI Lexer
+#
+class IDLPPAPILexer(IDLLexer):
+ # Token definitions
+ #
+ # These need to be methods for lexer construction, despite not using self.
+ # pylint: disable=R0201
+
+ # Special multi-character operators
+ def t_LSHIFT(self, t):
+ r'<<'
+ return t
+
+ def t_RSHIFT(self, t):
+ r'>>'
+ return t
+
+  # Return a "preprocessor" inline block
+  def t_INLINE(self, t):
+    r'\#inline (.|\n)*?\#endinl.*'
+    self.AddLines(t.value.count('\n'))
+    return t
+
+  def __init__(self):
+ IDLLexer.__init__(self)
+ self._AddTokens(['INLINE', 'LSHIFT', 'RSHIFT'])
+ self._AddKeywords(['label', 'struct'])
+
+ # Add number types
+ self._AddKeywords(['char', 'int8_t', 'int16_t', 'int32_t', 'int64_t'])
+ self._AddKeywords(['uint8_t', 'uint16_t', 'uint32_t', 'uint64_t'])
+ self._AddKeywords(['double_t', 'float_t'])
+
+ # Add handle types
+ self._AddKeywords(['handle_t', 'PP_FileHandle'])
+
+ # Add pointer types (void*, char*, const char*, const void*)
+ self._AddKeywords(['mem_t', 'str_t', 'cstr_t', 'interface_t'])
+
+ # Remove JS types
+ self._DelKeywords(['boolean', 'byte', 'ByteString', 'Date', 'DOMString',
+ 'double', 'float', 'long', 'object', 'octet', 'RegExp',
+ 'short', 'unsigned'])
+
+
+# If run by itself, attempt to build the lexer
+if __name__ == '__main__':
+ lexer = IDLPPAPILexer()
+ lexer.Tokenize(open('test_parser/inline_ppapi.idl').read())
+ for tok in lexer.GetTokens():
+ print '\n' + str(tok)
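Editor's note: IDLPPAPILexer customizes the shared lexer purely through the _AddTokens, _AddKeywords and _DelKeywords hooks rather than by redefining lexer rules. A sketch of the same pattern for a hypothetical further extension (the fixedpoint_t keyword below is invented for illustration):

# Illustrative sketch only, not part of the patch.
from idl_ppapi_lexer import IDLPPAPILexer

class IDLPPAPIExperimentalLexer(IDLPPAPILexer):
  def __init__(self):
    IDLPPAPILexer.__init__(self)
    # Hypothetical extra keyword, added through the same hook used above.
    self._AddKeywords(['fixedpoint_t'])
    # Keywords can be retired the same way.
    self._DelKeywords(['PP_FileHandle'])

if __name__ == '__main__':
  lexer = IDLPPAPIExperimentalLexer()
  lexer.Tokenize('label Chrome { M13 = 1.0 };', 'example.idl')
  for tok in lexer.GetTokens():
    print(str(tok))
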
diff --git a/tools/idl_parser/idl_ppapi_parser.py b/tools/idl_parser/idl_ppapi_parser.py
new file mode 100755
index 0000000..07963ea
--- /dev/null
+++ b/tools/idl_parser/idl_ppapi_parser.py
@@ -0,0 +1,307 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Parser for PPAPI IDL """
+
+#
+# IDL Parser
+#
+# The parser uses the PLY yacc library to build a set of parsing rules based
+# on WebIDL.
+#
+# The WebIDL specification and grammar can be found at:
+# http://heycam.github.io/webidl/
+# PLY can be found at:
+# http://www.dabeaz.com/ply/
+#
+# The parser generates a tree by recursively matching sets of items against
+# defined patterns. When a match is made, that set of items is reduced
+# to a new item. The new item can provide a match for parent patterns.
+# In this way an AST is built (reduced) depth first.
+#
+
+#
+# Disable check for line length and Member as Function due to how grammar rules
+# are defined with PLY
+#
+# pylint: disable=R0201
+# pylint: disable=C0301
+
+import sys
+
+from idl_ppapi_lexer import IDLPPAPILexer
+from idl_parser import IDLParser, ListFromConcat, ParseFile
+from idl_node import IDLNode
+
+class IDLPPAPIParser(IDLParser):
+#
+# We force all input files to start with two comments: a copyright notice
+# followed by a file-level comment. The file-level productions come after
+# these two comments.
+#
+ # [0] Insert a TOP definition for Copyright and Comments
+ def p_Top(self, p):
+ """Top : COMMENT COMMENT Definitions"""
+ Copyright = self.BuildComment('Copyright', p, 1)
+ Filedoc = self.BuildComment('Comment', p, 2)
+ p[0] = ListFromConcat(Copyright, Filedoc, p[3])
+
+#
+# The parser is based on the WebIDL standard. See:
+# http://heycam.github.io/webidl/#idl-grammar
+#
+ # [1]
+ def p_Definitions(self, p):
+ """Definitions : ExtendedAttributeList Definition Definitions
+ | """
+ if len(p) > 1:
+ p[2].AddChildren(p[1])
+ p[0] = ListFromConcat(p[2], p[3])
+
+ # [2] Add INLINE definition
+ def p_Definition(self, p):
+ """Definition : CallbackOrInterface
+ | Struct
+ | Partial
+ | Dictionary
+ | Exception
+ | Enum
+ | Typedef
+ | ImplementsStatement
+ | Label
+ | Inline"""
+ p[0] = p[1]
+
+ def p_Inline(self, p):
+ """Inline : INLINE"""
+ words = p[1].split()
+ name = self.BuildAttribute('NAME', words[1])
+ lines = p[1].split('\n')
+ value = self.BuildAttribute('VALUE', '\n'.join(lines[1:-1]) + '\n')
+ children = ListFromConcat(name, value)
+ p[0] = self.BuildProduction('Inline', p, 1, children)
+
+#
+# Label
+#
+# A label is a special kind of enumeration which allows us to map a set of
+# version numbers to releases.
+#
+ def p_Label(self, p):
+ """Label : LABEL identifier '{' LabelList '}' ';'"""
+ p[0] = self.BuildNamed('Label', p, 2, p[4])
+
+ def p_LabelList(self, p):
+ """LabelList : identifier '=' float LabelCont"""
+ val = self.BuildAttribute('VALUE', p[3])
+ label = self.BuildNamed('LabelItem', p, 1, val)
+ p[0] = ListFromConcat(label, p[4])
+
+ def p_LabelCont(self, p):
+ """LabelCont : ',' LabelList
+ |"""
+ if len(p) > 1:
+ p[0] = p[2]
+
+ def p_LabelContError(self, p):
+ """LabelCont : error LabelCont"""
+ p[0] = p[2]
+
+ # [5.1] Add "struct" style interface
+ def p_Struct(self, p):
+ """Struct : STRUCT identifier Inheritance '{' StructMembers '}' ';'"""
+ p[0] = self.BuildNamed('Struct', p, 2, ListFromConcat(p[3], p[5]))
+
+ def p_StructMembers(self, p):
+ """StructMembers : StructMember StructMembers
+ |"""
+ if len(p) > 1:
+ p[0] = ListFromConcat(p[1], p[2])
+
+ def p_StructMember(self, p):
+ """StructMember : ExtendedAttributeList Type identifier ';'"""
+ p[0] = self.BuildNamed('Member', p, 3, ListFromConcat(p[1], p[2]))
+
+ # [24]
+ def p_Typedef(self, p):
+ """Typedef : TYPEDEF ExtendedAttributeListNoComments Type identifier ';'"""
+ p[0] = self.BuildNamed('Typedef', p, 4, ListFromConcat(p[2], p[3]))
+
+ # [24.1]
+ def p_TypedefFunc(self, p):
+ """Typedef : TYPEDEF ExtendedAttributeListNoComments ReturnType identifier '(' ArgumentList ')' ';'"""
+ args = self.BuildProduction('Arguments', p, 5, p[6])
+ p[0] = self.BuildNamed('Callback', p, 4, ListFromConcat(p[2], p[3], args))
+
+ # [27]
+ def p_ConstValue(self, p):
+ """ConstValue : integer
+ | integer LSHIFT integer
+ | integer RSHIFT integer"""
+ val = str(p[1])
+ if len(p) > 2:
+ val = "%s %s %s" % (p[1], p[2], p[3])
+ p[0] = ListFromConcat(self.BuildAttribute('TYPE', 'integer'),
+ self.BuildAttribute('VALUE', val))
+
+ def p_ConstValueStr(self, p):
+ """ConstValue : string"""
+ p[0] = ListFromConcat(self.BuildAttribute('TYPE', 'string'),
+ self.BuildAttribute('VALUE', p[1]))
+
+ # Boolean & Float Literals area already BuildAttributes
+ def p_ConstValueLiteral(self, p):
+ """ConstValue : FloatLiteral
+ | BooleanLiteral """
+ p[0] = p[1]
+
+ # [21]
+ def p_EnumValueList(self, p):
+ """EnumValueList : EnumValue EnumValues"""
+ p[0] = ListFromConcat(p[1], p[2])
+
+ # [22]
+ def p_EnumValues(self, p):
+ """EnumValues : ',' EnumValue EnumValues
+ |"""
+ if len(p) > 1:
+ p[0] = ListFromConcat(p[2], p[3])
+
+ def p_EnumValue(self, p):
+ """EnumValue : ExtendedAttributeList identifier
+ | ExtendedAttributeList identifier '=' ConstValue"""
+ p[0] = self.BuildNamed('EnumItem', p, 2, p[1])
+ if len(p) > 3:
+ p[0].AddChildren(p[4])
+
+ def p_PrimitiveType(self, p):
+ """PrimitiveType : IntegerType
+ | UnsignedIntegerType
+ | FloatType
+ | HandleType
+ | PointerType"""
+ if type(p[1]) == str:
+ p[0] = self.BuildNamed('PrimitiveType', p, 1)
+ else:
+ p[0] = p[1]
+
+ def p_PointerType(self, p):
+ """PointerType : STR_T
+ | MEM_T
+ | CSTR_T
+ | INTERFACE_T
+ | NULL"""
+ p[0] = p[1]
+
+ def p_HandleType(self, p):
+ """HandleType : HANDLE_T
+ | PP_FILEHANDLE"""
+ p[0] = p[1]
+
+ # [66]
+ def p_FloatType(self, p):
+ """FloatType : FLOAT_T
+ | DOUBLE_T"""
+ p[0] = p[1]
+
+ # [67]
+ def p_UnsignedIntegerType(self, p):
+ """UnsignedIntegerType : UINT8_T
+ | UINT16_T
+ | UINT32_T
+ | UINT64_T"""
+ p[0] = p[1]
+
+
+ # [68]
+ def p_IntegerType(self, p):
+ """IntegerType : CHAR
+ | INT8_T
+ | INT16_T
+ | INT32_T
+ | INT64_T"""
+ p[0] = p[1]
+
+ # These targets are no longer used
+ def p_OptionalLong(self, p):
+ """ """
+ pass
+
+ def p_UnrestrictedFloatType(self, p):
+ """ """
+ pass
+
+ def p_null(self, p):
+ """ """
+ pass
+
+ # We only support:
+ # [ identifier ]
+ # [ identifier ( ArgumentList )]
+ # [ identifier ( ValueList )]
+ # [ identifier = identifier ]
+ # [ identifier = ( IdentifierList )]
+ # [ identifier = ConstValue ]
+ # [ identifier = identifier ( ArgumentList )]
+ # [51] map directly to 74-77
+ # [52-54, 56] are unsupported
+ def p_ExtendedAttribute(self, p):
+ """ExtendedAttribute : ExtendedAttributeNoArgs
+ | ExtendedAttributeArgList
+ | ExtendedAttributeValList
+ | ExtendedAttributeIdent
+ | ExtendedAttributeIdentList
+ | ExtendedAttributeIdentConst
+ | ExtendedAttributeNamedArgList"""
+ p[0] = p[1]
+
+ def p_ExtendedAttributeValList(self, p):
+ """ExtendedAttributeValList : identifier '(' ValueList ')'"""
+ arguments = self.BuildProduction('Values', p, 2, p[3])
+ p[0] = self.BuildNamed('ExtAttribute', p, 1, arguments)
+
+ def p_ValueList(self, p):
+ """ValueList : ConstValue ValueListCont"""
+ p[0] = ListFromConcat(p[1], p[2])
+
+ def p_ValueListCont(self, p):
+ """ValueListCont : ValueList
+ |"""
+ if len(p) > 1:
+ p[0] = p[1]
+
+ # [76]
+ def p_ExtendedAttributeIdentConst(self, p):
+ """ExtendedAttributeIdentConst : identifier '=' ConstValue"""
+ p[0] = self.BuildNamed('ExtAttribute', p, 1, p[3])
+
+
+ def __init__(self, lexer, verbose=False, debug=False, mute_error=False):
+ IDLParser.__init__(self, lexer, verbose, debug, mute_error)
+
+
+def main(argv):
+ nodes = []
+ parser = IDLPPAPIParser(IDLPPAPILexer())
+ errors = 0
+
+ for filename in argv:
+ filenode = ParseFile(parser, filename)
+ if filenode:
+ errors += filenode.GetProperty('ERRORS')
+ nodes.append(filenode)
+
+ ast = IDLNode('AST', '__AST__', 0, 0, nodes)
+
+ print '\n'.join(ast.Tree(accept_props=['PROD', 'TYPE', 'VALUE']))
+ if errors:
+ print '\nFound %d errors.\n' % errors
+
+
+ return errors
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
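Editor's note: each p_* method's docstring is the yacc production and its body reduces the matched symbols into IDLNode/IDLAttribute objects; for example, the ConstValue shift rules above store "1 << 1" as TYPE/VALUE attributes on an EnumItem. A minimal sketch, assuming it is run from tools/idl_parser; the PPAPI snippet is hypothetical:

# Illustrative sketch only, not part of the patch.
from idl_ppapi_lexer import IDLPPAPILexer
from idl_ppapi_parser import IDLPPAPIParser

PPAPI_TEXT = '''\
/* Copyright comment (the PPAPI Top production expects two leading comments). */
/* File-level comment. */
label Chrome {
  M13 = 1.0
};

enum Flags {
  kFirst = 1 << 1,
  kSecond = 1 << 2
};
'''

parser = IDLPPAPIParser(IDLPPAPILexer(), mute_error=True)
file_node = parser.ParseText('<string>', PPAPI_TEXT)
if file_node:
  # Dump the tree with the TYPE/VALUE attributes built by the ConstValue rules.
  print('\n'.join(file_node.Tree(accept_props=['TYPE', 'VALUE'])))
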
diff --git a/tools/idl_parser/run_tests.py b/tools/idl_parser/run_tests.py
new file mode 100755
index 0000000..cf26759
--- /dev/null
+++ b/tools/idl_parser/run_tests.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import sys
+import unittest
+
+if __name__ == '__main__':
+ suite = unittest.TestSuite()
+ for testname in glob.glob('*_test.py'):
+ print 'Adding Test: ' + testname
+ module = __import__(testname[:-3])
+ suite.addTests(unittest.defaultTestLoader.loadTestsFromModule(module))
+ result = unittest.TextTestRunner(verbosity=2).run(suite)
+ if result.wasSuccessful():
+ sys.exit(0)
+ else:
+ sys.exit(1)
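Editor's note: run_tests.py discovers tests by globbing for *_test.py in the working directory and importing each file as a module. On Python 2.7+ the standard loader can perform the same one-directory discovery; a sketch of that alternative:

# Illustrative sketch only, not part of the patch; assumes Python 2.7+.
import sys
import unittest

if __name__ == '__main__':
  suite = unittest.defaultTestLoader.discover('.', pattern='*_test.py')
  result = unittest.TextTestRunner(verbosity=2).run(suite)
  sys.exit(0 if result.wasSuccessful() else 1)
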
diff --git a/tools/idl_parser/test_lexer/keywords.in b/tools/idl_parser/test_lexer/keywords.in
new file mode 100644
index 0000000..16dc32f
--- /dev/null
+++ b/tools/idl_parser/test_lexer/keywords.in
@@ -0,0 +1,40 @@
+ANY any
+ATTRIBUTE attribute
+BOOLEAN boolean
+BYTESTRING ByteString
+BYTE byte
+CALLBACK callback
+CONST const
+CREATOR creator
+DATE Date
+DELETER deleter
+DICTIONARY dictionary
+DOMSTRING DOMString
+DOUBLE double
+FALSE false
+FLOAT float
+EXCEPTION exception
+GETTER getter
+IMPLEMENTS implements
+INFINITY Infinity
+INTERFACE interface
+LEGACYCALLER legacycaller
+LONG long
+NAN Nan
+NULL null
+OBJECT object
+OPTIONAL optional
+OR or
+PARTIAL partial
+READONLY readonly
+REGEXP RegExp
+SERIALIZER serializer
+SETTER setter
+SHORT short
+STATIC static
+STRINGIFIER stringifier
+TYPEDEF typedef
+TRUE true
+UNSIGNED unsigned
+UNRESTRICTED unrestricted
+VOID void
diff --git a/tools/idl_parser/test_lexer/keywords_ppapi.in b/tools/idl_parser/test_lexer/keywords_ppapi.in
new file mode 100644
index 0000000..62567e4
--- /dev/null
+++ b/tools/idl_parser/test_lexer/keywords_ppapi.in
@@ -0,0 +1,44 @@
+ANY any
+ATTRIBUTE attribute
+CALLBACK callback
+CONST const
+CREATOR creator
+DELETER deleter
+DICTIONARY dictionary
+FALSE false
+EXCEPTION exception
+GETTER getter
+IMPLEMENTS implements
+INFINITY Infinity
+INTERFACE interface
+LABEL label
+LEGACYCALLER legacycaller
+NAN Nan
+NULL null
+OPTIONAL optional
+OR or
+PARTIAL partial
+READONLY readonly
+SETTER setter
+STATIC static
+STRINGIFIER stringifier
+TYPEDEF typedef
+TRUE true
+VOID void
+CHAR char
+INT8_T int8_t
+INT16_T int16_t
+INT32_T int32_t
+INT64_T int64_t
+UINT8_T uint8_t
+UINT16_T uint16_t
+UINT32_T uint32_t
+UINT64_T uint64_t
+DOUBLE_T double_t
+FLOAT_T float_t
+MEM_T mem_t
+STR_T str_t
+CSTR_T cstr_t
+INTERFACE_T interface_t
+HANDLE_T handle_t
+PP_FILEHANDLE PP_FileHandle
\ No newline at end of file
diff --git a/tools/idl_parser/test_lexer/values.in b/tools/idl_parser/test_lexer/values.in
new file mode 100644
index 0000000..be714d0
--- /dev/null
+++ b/tools/idl_parser/test_lexer/values.in
@@ -0,0 +1,55 @@
+integer 1 integer 123 integer 12345
+identifier A123 identifier A_A
+
+COMMENT /*XXXX*/
+COMMENT //XXXX
+
+COMMENT /*MULTI LINE*/
+
+[ [
+] ]
+* *
+. .
+( (
+) )
+{ {
+} }
+[ [
+] ]
+, ,
+; ;
+: :
+= =
++ +
+- -
+/ /
+~ ~
+| |
+& &
+^ ^
+> >
+< <
+
+ELLIPSIS ...
+
+float 1.1
+float 1e1
+float -1.1
+float -1e1
+float 1e-1
+float -1e-1
+float 1.0e1
+float -1.0e-1
+
+integer 00
+integer 01
+integer 0123
+integer 01234567
+integer 123
+integer 1234567890
+integer 0x123
+integer 0X123
+integer 0x1234567890AbCdEf
+integer 0X1234567890aBcDeF
+
+identifier blah
diff --git a/tools/idl_parser/test_lexer/values_ppapi.in b/tools/idl_parser/test_lexer/values_ppapi.in
new file mode 100644
index 0000000..33fa577
--- /dev/null
+++ b/tools/idl_parser/test_lexer/values_ppapi.in
@@ -0,0 +1,50 @@
+integer 1 integer 123 integer 12345
+identifier A123 identifier A_A
+
+COMMENT /*XXXX*/
+COMMENT //XXXX
+
+COMMENT /*MULTI LINE*/
+
+[ [
+] ]
+* *
+. .
+( (
+) )
+{ {
+} }
+[ [
+] ]
+, ,
+; ;
+: :
+= =
++ +
+- -
+/ /
+~ ~
+| |
+& &
+^ ^
+> >
+< <
+
+LSHIFT <<
+RSHIFT >>
+ELLIPSIS ...
+
+float 1.1
+float 1e1
+float -1.1
+float -1e1
+float 1e-1
+float -1e-1
+float 1.0e1
+float -1.0e-1
+
+integer 00
+integer 01
+integer 0123
+
+identifier blah
diff --git a/tools/idl_parser/test_parser/callback_web.idl b/tools/idl_parser/test_parser/callback_web.idl
new file mode 100644
index 0000000..b16b6b5
--- /dev/null
+++ b/tools/idl_parser/test_parser/callback_web.idl
@@ -0,0 +1,116 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Callback productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+
+/* TREE
+ *Callback(VoidFunc)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ */
+callback VoidFunc = void();
+
+/* TREE
+ *Callback(VoidFuncLongErr)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ * Error(Unexpected ).)
+ */
+callback VoidFuncLongErr = void ( long );
+
+/* TREE
+ *Callback(VoidFuncLong)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ * Argument(L1)
+ * Type()
+ * PrimitiveType(long)
+ */
+callback VoidFuncLong = void ( long L1 );
+
+/* TREE
+ *Callback(VoidFuncLongArray)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ * Argument(L1)
+ * Type()
+ * PrimitiveType(long)
+ * Array()
+ */
+callback VoidFuncLongArray = void ( long[] L1 );
+
+/* TREE
+ *Callback(VoidFuncLongArray5)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ * Argument(L1)
+ * Type()
+ * PrimitiveType(long)
+ * Array(5)
+ */
+callback VoidFuncLongArray5 = void ( long[5] L1 );
+
+
+/* TREE
+ *Callback(VoidFuncLongArray54)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ * Argument(L1)
+ * Type()
+ * PrimitiveType(long)
+ * Array(5)
+ * Argument(L2)
+ * Type()
+ * PrimitiveType(long long)
+ * Array(4)
+ */
+callback VoidFuncLongArray54 = void ( long[5] L1, long long [4] L2 );
+
+
+/* TREE
+ *Callback(VoidFuncLongIdent)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ * Argument(L1)
+ * Type()
+ * PrimitiveType(long)
+ * Array(5)
+ * Argument(L2)
+ * Type()
+ * Typeref(VoidFuncLongArray)
+ */
+callback VoidFuncLongIdent = void ( long[5] L1, VoidFuncLongArray L2 );
diff --git a/tools/idl_parser/test_parser/dictionary_web.idl b/tools/idl_parser/test_parser/dictionary_web.idl
new file mode 100644
index 0000000..5030686
--- /dev/null
+++ b/tools/idl_parser/test_parser/dictionary_web.idl
@@ -0,0 +1,95 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Dictionary productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+
+/* TREE
+ *Dictionary(MyDict)
+ */
+dictionary MyDict { };
+
+/* TREE
+ *Dictionary(MyDictInherit)
+ * Inherit(Foo)
+ */
+dictionary MyDictInherit : Foo {};
+
+/* TREE
+ *Dictionary(MyDictPartial)
+ */
+partial dictionary MyDictPartial { };
+
+/* ERROR Unexpected ":" after identifier "MyDictInherit". */
+partial dictionary MyDictInherit : Foo {};
+
+/* TREE
+ *Dictionary(MyDictBig)
+ * Key(setString)
+ * Type()
+ * PrimitiveType(DOMString)
+ * Default(Foo)
+ * Key(setLong)
+ * Type()
+ * PrimitiveType(unsigned long long)
+ * Default(123)
+ * Key(unsetLong)
+ * Type()
+ * PrimitiveType(long)
+ */
+dictionary MyDictBig {
+ DOMString setString = "Foo";
+ unsigned long long setLong = 123;
+ long unsetLong;
+};
+
+
+/* ERROR Unexpected "{" after keyword "dictionary". */
+dictionary {
+ DOMString? setString = null;
+};
+
+
+/* ERROR Unexpected identifier "NoColon" after identifier "ForParent". */
+dictionary ForParent NoColon {
+ DOMString? setString = null;
+};
+
+/* TREE
+ *Dictionary(MyDictNull)
+ * Key(setString)
+ * Type()
+ * PrimitiveType(DOMString)
+ * Default(NULL)
+ */
+dictionary MyDictNull {
+ DOMString? setString = null;
+};
+
+
diff --git a/tools/idl_parser/test_parser/enum_ppapi.idl b/tools/idl_parser/test_parser/enum_ppapi.idl
new file mode 100644
index 0000000..1b088b8
--- /dev/null
+++ b/tools/idl_parser/test_parser/enum_ppapi.idl
@@ -0,0 +1,126 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Enum productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+/* TREE
+ *Enum(MealType1)
+ * EnumItem(rice)
+ * EnumItem(noodles)
+ * EnumItem(other)
+*/
+enum MealType1 {
+ /* BUILD EnumItem (rice) */
+ rice,
+ /* BUILD EnumItem (noodles) */
+ noodles,
+ /* BUILD EnumItem(other) */
+ other
+};
+
+/* BUILD Error(Enum missing name.) */
+/* ERROR Enum missing name. */
+enum {
+ rice,
+ noodles,
+ other,
+};
+
+/* TREE
+ *Enum(MealType2)
+ * EnumItem(rice)
+ * EnumItem(noodles)
+ * EnumItem(other)
+*/
+enum MealType2 {
+ /* BUILD EnumItem(rice) */
+ rice,
+ /* BUILD EnumItem(noodles) */
+ noodles = 1,
+ /* BUILD EnumItem(other) */
+ other
+};
+
+/* BUILD Error(Unexpected identifier "noodles" after identifier "rice".) */
+/* ERROR Unexpected identifier "noodles" after identifier "rice". */
+enum MissingComma {
+ rice
+ noodles,
+ other
+};
+
+/* BUILD Error(Trailing comma in block.) */
+/* ERROR Trailing comma in block. */
+enum TrailingComma {
+ rice,
+ noodles,
+ other,
+};
+
+/* BUILD Error(Unexpected "," after ",".) */
+/* ERROR Unexpected "," after ",". */
+enum ExtraComma {
+ rice,
+ noodles,
+ ,other,
+};
+
+/* BUILD Error(Unexpected keyword "interface" after "{".) */
+/* ERROR Unexpected keyword "interface" after "{". */
+enum ExtraComma {
+ interface,
+ noodles,
+ ,other,
+};
+
+/* BUILD Error(Unexpected string "somename" after "{".) */
+/* ERROR Unexpected string "somename" after "{". */
+enum ExtraComma {
+ "somename",
+ noodles,
+ other,
+};
+
+/* BUILD Enum(MealType3) */
+enum MealType3 {
+ /* BUILD EnumItem(rice) */
+ rice = 1 << 1,
+ /* BUILD EnumItem(noodles) */
+ noodles = 0x1 << 0x2,
+ /* BUILD EnumItem(other) */
+ other = 012 << 777
+};
+
+/* BUILD Enum(MealType4) */
+enum MealType4 {
+ /* BUILD EnumItem(rice) */
+ rice = true,
+ /* BUILD EnumItem(noodles) */
+ noodles = false
+};
diff --git a/tools/idl_parser/test_parser/enum_web.idl b/tools/idl_parser/test_parser/enum_web.idl
new file mode 100644
index 0000000..e3107c0
--- /dev/null
+++ b/tools/idl_parser/test_parser/enum_web.idl
@@ -0,0 +1,123 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Enum productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+/* TREE
+ *Enum(MealType1)
+ * EnumItem(rice)
+ * EnumItem(noodles)
+ * EnumItem(other)
+*/
+enum MealType1 {
+ /* BUILD EnumItem (rice) */
+ "rice",
+ /* BUILD EnumItem (noodles) */
+ "noodles",
+ /* BUILD EnumItem(other) */
+ "other"
+};
+
+/* BUILD Error(Enum missing name.) */
+/* ERROR Enum missing name. */
+enum {
+ "rice",
+ "noodles",
+ "other"
+};
+
+/* TREE
+ *Enum(MealType2)
+ * EnumItem(rice)
+ * EnumItem(noodles)
+ * EnumItem(other)
+*/
+enum MealType2 {
+ /* BUILD EnumItem(rice) */
+ "rice",
+ /* BUILD EnumItem(noodles) */
+ "noodles",
+ /* BUILD EnumItem(other) */
+ "other"
+};
+
+/* TREE
+ *Enum(TrailingComma)
+ * EnumItem(rice)
+ * EnumItem(noodles)
+ * EnumItem(other)
+*/
+enum TrailingComma {
+ "rice",
+ "noodles",
+ "other",
+};
+
+/* BUILD Error(Unexpected string "noodles" after string "rice".) */
+/* ERROR Unexpected string "noodles" after string "rice". */
+enum MissingComma {
+ "rice"
+ "noodles",
+ "other"
+};
+
+/* BUILD Error(Unexpected "," after ",".) */
+/* ERROR Unexpected "," after ",". */
+enum ExtraComma {
+ "rice",
+ "noodles",
+ ,"other",
+};
+
+/* BUILD Error(Unexpected keyword "interface" after "{".) */
+/* ERROR Unexpected keyword "interface" after "{". */
+enum ExtraComma {
+ interface,
+ "noodles",
+ ,"other",
+};
+
+/* BUILD Error(Unexpected identifier "somename" after "{".) */
+/* ERROR Unexpected identifier "somename" after "{". */
+enum ExtraComma {
+ somename,
+ "noodles",
+ ,"other",
+};
+
+/* BUILD Enum(MealType3) */
+enum MealType3 {
+ /* BUILD EnumItem(rice) */
+ "rice",
+ /* BUILD EnumItem(noodles) */
+ "noodles",
+ /* BUILD EnumItem(other) */
+ "other"
+};
+
diff --git a/tools/idl_parser/test_parser/exception_web.idl b/tools/idl_parser/test_parser/exception_web.idl
new file mode 100644
index 0000000..3801a4a
--- /dev/null
+++ b/tools/idl_parser/test_parser/exception_web.idl
@@ -0,0 +1,87 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Exception productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+
+/* TREE
+ *Exception(MyExc)
+ */
+exception MyExc { };
+
+/* TREE
+ *Exception(MyExcInherit)
+ * Inherit(Foo)
+ */
+exception MyExcInherit : Foo {};
+
+/* ERROR Unexpected keyword "exception" after keyword "partial". */
+partial exception MyExcPartial { };
+
+/* TREE
+ *Exception(MyExcBig)
+ * ExceptionField(MyString)
+ * Type()
+ * PrimitiveType(DOMString)
+ * Error(Unexpected "=" after identifier "ErrorSetLong".)
+ * ExceptionField(MyLong)
+ * Type()
+ * PrimitiveType(long)
+ */
+exception MyExcBig {
+ DOMString MyString;
+ unsigned long long ErrorSetLong = 123;
+ long MyLong;
+};
+
+
+/* ERROR Unexpected "{" after keyword "exception". */
+exception {
+ DOMString? setString = null;
+};
+
+
+/* ERROR Unexpected identifier "NoColon" after identifier "ForParent". */
+exception ForParent NoColon {
+ DOMString? setString = null;
+};
+
+/* TREE
+ *Exception(MyExcConst)
+ * Const(setString)
+ * PrimitiveType(DOMString)
+ * Value(NULL)
+ */
+exception MyExcConst {
+ const DOMString? setString = null;
+};
+
+
+
+
diff --git a/tools/idl_parser/test_parser/extattr_ppapi.idl b/tools/idl_parser/test_parser/extattr_ppapi.idl
new file mode 100644
index 0000000..07afbc0
--- /dev/null
+++ b/tools/idl_parser/test_parser/extattr_ppapi.idl
@@ -0,0 +1,99 @@
+/* Copyright 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test ExtendedAttribute productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+/* TREE
+ *Interface(Foo)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ * Arguments()
+ */
+
+[foo()] interface Foo {};
+
+/* TREE
+ *Interface(Foo)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ * Values()
+ */
+
+[foo(1)] interface Foo {};
+
+/* TREE
+ *Interface(Foo)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ * Values()
+ */
+
+[foo(1 true 1.2e-3)] interface Foo {};
+
+/* TREE
+ *Interface(Foo)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ * Arguments()
+ * Error(Unexpected ).)
+ */
+
+[foo(null)] interface Foo {};
+
+/* TREE
+ *Interface(Foo)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ */
+
+[foo=1] interface Foo {};
+
+/* TREE
+ *Interface(Foo)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ */
+
+[foo=true] interface Foo {};
+
+/* TREE
+ *Interface(Foo)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ */
+
+[foo=1.2e-3] interface Foo {};
+
+/* TREE
+ *Interface(Foo)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ */
+
+[foo=(bar, baz)] interface Foo {};
diff --git a/tools/idl_parser/test_parser/implements_web.idl b/tools/idl_parser/test_parser/implements_web.idl
new file mode 100644
index 0000000..252dd4b
--- /dev/null
+++ b/tools/idl_parser/test_parser/implements_web.idl
@@ -0,0 +1,52 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Implements productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+/* BUILD Implements(A) */
+/* PROP REFERENCE=B */
+A implements B;
+
+/* ERROR Unexpected ";" after keyword "implements". */
+A implements;
+
+/* BUILD Implements(B) */
+/* PROP REFERENCE=C */
+B implements C;
+
+/* ERROR Unexpected keyword "implements" after "]". */
+[foo] implements B;
+
+/* BUILD Implements(D) */
+/* PROP REFERENCE=E */
+D implements E;
+
+/* ERROR Unexpected keyword "implements" after comment. */
+implements C;
+
diff --git a/tools/idl_parser/test_parser/inline_ppapi.idl b/tools/idl_parser/test_parser/inline_ppapi.idl
new file mode 100644
index 0000000..134f60d
--- /dev/null
+++ b/tools/idl_parser/test_parser/inline_ppapi.idl
@@ -0,0 +1,46 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Inline productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+/* TREE
+ *Inline(C)
+ */
+
+#inline C
+This is my block of C code
+#endinl
+
+/* TREE
+ *Inline(CC)
+ */
+#inline CC
+This is my block of CC code
+#endinl
+
diff --git a/tools/idl_parser/test_parser/interface_web.idl b/tools/idl_parser/test_parser/interface_web.idl
new file mode 100644
index 0000000..8d41e91
--- /dev/null
+++ b/tools/idl_parser/test_parser/interface_web.idl
@@ -0,0 +1,183 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Interface productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+
+/* TREE
+ *Interface(MyIFace)
+ */
+interface MyIFace { };
+
+/* TREE
+ *Interface(MyIFaceInherit)
+ * Inherit(Foo)
+ */
+interface MyIFaceInherit : Foo {};
+
+/* TREE
+ *Interface(MyIFacePartial)
+ */
+partial interface MyIFacePartial { };
+
+/* ERROR Unexpected ":" after identifier "MyIFaceInherit". */
+partial interface MyIFaceInherit : Foo {};
+
+/* TREE
+ *Interface(MyIFaceBig)
+ * Const(setString)
+ * PrimitiveType(DOMString)
+ * Value(NULL)
+ */
+interface MyIFaceBig {
+ const DOMString? setString = null;
+};
+
+/* TREE
+ *Interface(MyIFaceBig2)
+ * Const(nullValue)
+ * PrimitiveType(DOMString)
+ * Value(NULL)
+ * Const(longValue)
+ * PrimitiveType(long)
+ * Value(123)
+ * Const(longValue2)
+ * PrimitiveType(long long)
+ * Value(123)
+ * Attribute(myString)
+ * Type()
+ * PrimitiveType(DOMString)
+ * Attribute(readOnlyString)
+ * Type()
+ * PrimitiveType(DOMString)
+ * Attribute(staticString)
+ * Type()
+ * PrimitiveType(DOMString)
+ * Operation(myFunction)
+ * Arguments()
+ * Argument(myLong)
+ * Type()
+ * PrimitiveType(long long)
+ * Type()
+ * PrimitiveType(void)
+ * Operation(staticFunction)
+ * Arguments()
+ * Argument(myLong)
+ * Type()
+ * PrimitiveType(long long)
+ * Type()
+ * PrimitiveType(void)
+ */
+interface MyIFaceBig2 {
+ const DOMString? nullValue = null;
+ const long longValue = 123;
+ const long long longValue2 = 123;
+ attribute DOMString myString;
+ readonly attribute DOMString readOnlyString;
+ static attribute DOMString staticString;
+ void myFunction(long long myLong);
+ static void staticFunction(long long myLong);
+};
+
+
+/* TREE
+ *Interface(MyIFaceSpecials)
+ * Operation(set)
+ * Arguments()
+ * Argument(property)
+ * Type()
+ * PrimitiveType(DOMString)
+ * Type()
+ * PrimitiveType(void)
+ * Operation(_unnamed_)
+ * Arguments()
+ * Argument(property)
+ * Type()
+ * PrimitiveType(DOMString)
+ * Type()
+ * PrimitiveType(double)
+ * Operation(GetFiveSix)
+ * Arguments()
+ * Argument(arg)
+ * Type()
+ * Typeref(SomeType)
+ * Type()
+ * PrimitiveType(long long)
+ * Array(5)
+ * Array(6)
+ */
+interface MyIFaceSpecials {
+ setter creator void set(DOMString property);
+ getter double (DOMString property);
+ long long [5][6] GetFiveSix(SomeType arg);
+};
+
+/* TREE
+ *Interface(MyIFaceStringifiers)
+ * Stringifier()
+ * Stringifier()
+ * Operation(_unnamed_)
+ * Arguments()
+ * Type()
+ * PrimitiveType(DOMString)
+ * Stringifier()
+ * Operation(namedStringifier)
+ * Arguments()
+ * Type()
+ * PrimitiveType(DOMString)
+ * Stringifier()
+ * Attribute(stringValue)
+ * Type()
+ * PrimitiveType(DOMString)
+ */
+interface MyIFaceStringifiers {
+ stringifier;
+ stringifier DOMString ();
+ stringifier DOMString namedStringifier();
+ stringifier attribute DOMString stringValue;
+};
+
+/* TREE
+ *Interface(MyExtendedAttributeInterface)
+ * Operation(method)
+ * Arguments()
+ * Type()
+ * PrimitiveType(void)
+ * ExtAttributes()
+ * ExtAttribute(Attr)
+ * ExtAttribute(MethodIdentList)
+ * ExtAttributes()
+ * ExtAttribute(MyExtendedAttribute)
+ * ExtAttribute(MyExtendedIdentListAttribute)
+ */
+[MyExtendedAttribute,
+ MyExtendedIdentListAttribute=(Foo, Bar, Baz)]
+interface MyExtendedAttributeInterface {
+ [Attr, MethodIdentList=(Foo, Bar)] void method();
+};
diff --git a/tools/idl_parser/test_parser/label_ppapi.idl b/tools/idl_parser/test_parser/label_ppapi.idl
new file mode 100644
index 0000000..264699d
--- /dev/null
+++ b/tools/idl_parser/test_parser/label_ppapi.idl
@@ -0,0 +1,48 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Label productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+/* TREE
+ *Label(Chrome1)
+ * LabelItem(M13)
+ */
+label Chrome1 {
+ M13 = 0.0
+};
+
+/* TREE
+ *Label(Chrome2)
+ * LabelItem(M12)
+ * LabelItem(M13)
+ */
+label Chrome2 {
+ M12 = 1.0,
+ M13 = 2.0,
+};
\ No newline at end of file
diff --git a/tools/idl_parser/test_parser/struct_ppapi.idl b/tools/idl_parser/test_parser/struct_ppapi.idl
new file mode 100644
index 0000000..dd017ac
--- /dev/null
+++ b/tools/idl_parser/test_parser/struct_ppapi.idl
@@ -0,0 +1,52 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Struct productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+/* TREE
+ *Struct(MyStruct)
+ * Member(x)
+ * Type()
+ * PrimitiveType(uint32_t)
+ * Member(y)
+ * Type()
+ * PrimitiveType(uint64_t)
+ * Member(string)
+ * ExtAttributes()
+ * ExtAttribute(fake_attribute)
+ * Type()
+ * PrimitiveType(str_t)
+ * ExtAttributes()
+ * ExtAttribute(union)
+ */
+[union] struct MyStruct {
+ uint32_t x;
+ uint64_t y;
+ [fake_attribute] str_t string;
+};
diff --git a/tools/idl_parser/test_parser/typedef_ppapi.idl b/tools/idl_parser/test_parser/typedef_ppapi.idl
new file mode 100644
index 0000000..1a80415
--- /dev/null
+++ b/tools/idl_parser/test_parser/typedef_ppapi.idl
@@ -0,0 +1,92 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Typedef productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+/* TREE
+ *Callback(foo)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ * Argument(x)
+ * Type()
+ * PrimitiveType(int32_t)
+ */
+callback foo = void (int32_t x);
+
+/* TREE
+ *Callback(foo)
+ * Type()
+ * PrimitiveType(void)
+ * Arguments()
+ * Argument(x)
+ * Type()
+ * PrimitiveType(int32_t)
+ */
+typedef void foo(int32_t x);
+
+/* TREE
+ *Typedef(MyLong)
+ * Type()
+ * PrimitiveType(int32_t)
+ */
+typedef int32_t MyLong;
+
+/* TREE
+ *Typedef(MyLongArray)
+ * Type()
+ * PrimitiveType(str_t)
+ * Array()
+ */
+typedef str_t[] MyLongArray;
+
+/* TREE
+ *Typedef(MyLongArray5)
+ * Type()
+ * PrimitiveType(mem_t)
+ * Array(5)
+ */
+typedef mem_t[5] MyLongArray5;
+
+/* TREE
+ *Typedef(MyLongArrayN5)
+ * Type()
+ * PrimitiveType(handle_t)
+ * Array()
+ * Array(5)
+ */
+typedef handle_t[][5] MyLongArrayN5;
+
+
+/* TREE
+ *Typedef(bar)
+ * Type()
+ * Typeref(foo)
+ */
+typedef foo bar;
\ No newline at end of file
diff --git a/tools/idl_parser/test_parser/typedef_web.idl b/tools/idl_parser/test_parser/typedef_web.idl
new file mode 100644
index 0000000..ba95db7
--- /dev/null
+++ b/tools/idl_parser/test_parser/typedef_web.idl
@@ -0,0 +1,190 @@
+/* Copyright (c) 2013 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file. */
+
+/* Test Typedef productions
+
+Run with --test to generate an AST and verify that all comments accurately
+reflect the state of the Nodes.
+
+BUILD Type(Name)
+This comment signals that a node of type <Type> is created with the
+name <Name>.
+
+ERROR Error String
+This comment signals that an error of <Error String> is generated. The error
+is not assigned to a node, but errors are expected in order.
+
+PROP Key=Value
+This comment signals that a property has been set on the Node such that
+<Key> = <Value>.
+
+TREE
+Type(Name)
+ Type(Name)
+ Type(Name)
+ Type(Name)
+ ...
+This comment signals that a tree of nodes matching the BUILD comment
+semantics should exist. This is an exact match.
+*/
+
+
+/* TREE
+ *Typedef(MyLong)
+ * Type()
+ * PrimitiveType(long)
+ */
+typedef long MyLong;
+
+/* TREE
+ *Typedef(MyLong)
+ * ExtAttributes()
+ * ExtAttribute(foo)
+ * Type()
+ * PrimitiveType(long)
+ */
+typedef [foo] long MyLong;
+
+/* TREE
+ *Typedef(MyLongArray)
+ * Type()
+ * PrimitiveType(long)
+ * Array()
+ */
+typedef long[] MyLongArray;
+
+/* TREE
+ *Typedef(MyLongSizedArray)
+ * Type()
+ * PrimitiveType(long)
+ * Array(4)
+ */
+typedef long[4] MyLongSizedArray;
+
+/* TREE
+ *Typedef(MyLongSizedArrayArray)
+ * Type()
+ * PrimitiveType(long)
+ * Array(4)
+ * Array(5)
+ */
+typedef long[4][5] MyLongSizedArrayArray;
+
+/* TREE
+ *Typedef(MyLongArraySizedArray)
+ * Type()
+ * PrimitiveType(long)
+ * Array()
+ * Array(5)
+ */
+typedef long[][5] MyLongArraySizedArray;
+
+/* TREE
+ *Typedef(MyTypeFive)
+ * Type()
+ * Typeref(MyType)
+ * Array(5)
+ */
+typedef MyType[5] MyTypeFive;
+
+/* TREE
+ *Typedef(MyTypeUnsizedFive)
+ * Type()
+ * Typeref(MyType)
+ * Array()
+ * Array(5)
+ */
+typedef MyType[][5] MyTypeUnsizedFive;
+
+/* TREE
+ *Typedef(MyLongLong)
+ * Type()
+ * PrimitiveType(long long)
+ */
+typedef long long MyLongLong;
+
+/* TREE
+ *Typedef(MyULong)
+ * Type()
+ * PrimitiveType(unsigned long)
+ */
+typedef unsigned long MyULong;
+
+/* TREE
+ *Typedef(MyULongLong)
+ * Type()
+ * PrimitiveType(unsigned long long)
+ */
+typedef unsigned long long MyULongLong;
+
+/* TREE
+ *Typedef(MyString)
+ * Type()
+ * PrimitiveType(DOMString)
+ */
+typedef DOMString MyString;
+
+/* TREE
+ *Typedef(MyObject)
+ * Type()
+ * PrimitiveType(object)
+ */
+typedef object MyObject;
+
+/* TREE
+ *Typedef(MyDate)
+ * Type()
+ * PrimitiveType(Date)
+ */
+typedef Date MyDate;
+
+/* TREE
+ *Typedef(MyFloat)
+ * Type()
+ * PrimitiveType(float)
+ */
+typedef float MyFloat;
+
+/* TREE
+ *Typedef(MyUFloat)
+ * Type()
+ * PrimitiveType(float)
+ */
+typedef unrestricted float MyUFloat;
+
+/* TREE
+ *Typedef(MyDouble)
+ * Type()
+ * PrimitiveType(double)
+ */
+typedef double MyDouble;
+
+/* TREE
+ *Typedef(MyUDouble)
+ * Type()
+ * PrimitiveType(double)
+ */
+typedef unrestricted double MyUDouble;
+
+/* TREE
+ *Typedef(MyBool)
+ * Type()
+ * PrimitiveType(boolean)
+ */
+typedef boolean MyBool;
+
+/* TREE
+ *Typedef(MyByte)
+ * Type()
+ * PrimitiveType(byte)
+ */
+typedef byte MyByte;
+
+/* TREE
+ *Typedef(MyOctet)
+ * Type()
+ * PrimitiveType(octet)
+ */
+typedef octet MyOctet;
+
diff --git a/tools/json_comment_eater/everything.json b/tools/json_comment_eater/everything.json
new file mode 100644
index 0000000..31db503
--- /dev/null
+++ b/tools/json_comment_eater/everything.json
@@ -0,0 +1,13 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Test API.
+{ "namespace": "test",
+ "comments": "yo", // Comments all have a // in them.
+ "strings": "yes", // Comment with "strings" and " character
+ "escaped\"": "string\"isescaped",
+ "more//": "\"more",
+ "so\\many": "\\\\escapes\\\\\"whoa",
+ "comment//inmiddle": "of string"
+}
diff --git a/tools/json_comment_eater/everything_expected.json b/tools/json_comment_eater/everything_expected.json
new file mode 100644
index 0000000..3fa02c1
--- /dev/null
+++ b/tools/json_comment_eater/everything_expected.json
@@ -0,0 +1,13 @@
+
+
+
+
+
+{ "namespace": "test",
+ "comments": "yo",
+ "strings": "yes",
+ "escaped\"": "string\"isescaped",
+ "more//": "\"more",
+ "so\\many": "\\\\escapes\\\\\"whoa",
+ "comment//inmiddle": "of string"
+}
diff --git a/tools/json_comment_eater/json_comment_eater.py b/tools/json_comment_eater/json_comment_eater.py
new file mode 100755
index 0000000..93261bf
--- /dev/null
+++ b/tools/json_comment_eater/json_comment_eater.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Utility to remove comments from JSON files so that they can be parsed by
+json.loads.
+'''
+
+import sys
+
+
+def _Rcount(string, chars):
+ '''Returns the number of consecutive characters from |chars| that occur at the
+ end of |string|.
+ '''
+ return len(string) - len(string.rstrip(chars))
+
+
+def _FindNextToken(string, tokens, start):
+ '''Finds the next token in |tokens| that occurs in |string| from |start|.
+ Returns a tuple (index, token key).
+ '''
+ min_index, min_key = (-1, None)
+ for k in tokens:
+ index = string.find(k, start)
+ if index != -1 and (min_index == -1 or index < min_index):
+ min_index, min_key = (index, k)
+ return (min_index, min_key)
+
+
+def _ReadString(input, start, output):
+ output.append('"')
+ start_range, end_range = (start, input.find('"', start))
+ # \" escapes the ", \\" doesn't, \\\" does, etc.
+ while (end_range != -1 and
+ _Rcount(input[start_range:end_range], '\\') % 2 == 1):
+ start_range, end_range = (end_range, input.find('"', end_range + 1))
+ if end_range == -1:
+ return start_range + 1
+ output.append(input[start:end_range + 1])
+ return end_range + 1
+
+
+def _ReadComment(input, start, output):
+ eol_tokens = ('\n', '\r')
+ eol_token_index, eol_token = _FindNextToken(input, eol_tokens, start)
+ if eol_token is None:
+ return len(input)
+ output.append(eol_token)
+ return eol_token_index + len(eol_token)
+
+
+def Nom(input):
+ token_actions = {
+ '"': _ReadString,
+ '//': _ReadComment,
+ }
+ output = []
+ pos = 0
+ while pos < len(input):
+ token_index, token = _FindNextToken(input, token_actions.keys(), pos)
+ if token is None:
+ output.append(input[pos:])
+ break
+ output.append(input[pos:token_index])
+ pos = token_actions[token](input, token_index + len(token), output)
+ return ''.join(output)
+
+
+if __name__ == '__main__':
+ sys.stdout.write(Nom(sys.stdin.read()))
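Editor's note: Nom scans for the next '"' or '//' token, copies string bodies verbatim (counting trailing backslashes so escaped quotes do not end the string) and drops comment bodies while keeping the end-of-line token, so the line structure of the remaining JSON is preserved. A minimal usage sketch; the sample input is hypothetical:

# Illustrative sketch only, not part of the patch.
import json
from json_comment_eater import Nom

COMMENTED = '''\
// A commented JSON document.
{
  "name": "test",  // trailing comment
  "url//path": "has // inside a string"
}
'''

stripped = Nom(COMMENTED)
print(json.loads(stripped)['name'])  # prints: test
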
diff --git a/tools/json_comment_eater/json_comment_eater_test.py b/tools/json_comment_eater/json_comment_eater_test.py
new file mode 100755
index 0000000..5a230eb
--- /dev/null
+++ b/tools/json_comment_eater/json_comment_eater_test.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from json_comment_eater import Nom
+import unittest
+
+class JsonCommentEaterTest(unittest.TestCase):
+ def _Load(self, test_name):
+ '''Loads the input and expected output for |test_name| as given by reading
+ in |test_name|.json and |test_name|_expected.json, and returns the string
+ contents as a tuple in that order.
+ '''
+ def read(file_name):
+ with open(file_name, 'r') as f:
+ return f.read()
+ return [read(pattern % test_name)
+ for pattern in ('%s.json', '%s_expected.json')]
+
+ def testEverything(self):
+ json, expected_json = self._Load('everything')
+ self.assertEqual(expected_json, Nom(json))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/json_schema_compiler/BUILD.gn b/tools/json_schema_compiler/BUILD.gn
new file mode 100644
index 0000000..2d7c183
--- /dev/null
+++ b/tools/json_schema_compiler/BUILD.gn
@@ -0,0 +1,13 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Utility sources against which generated API modules should be linked.
+source_set("generated_api_util") {
+ sources = [
+ "util.cc",
+ "util.h"
+ ]
+ deps = [ "//base" ]
+}
+
diff --git a/tools/json_schema_compiler/PRESUBMIT.py b/tools/json_schema_compiler/PRESUBMIT.py
new file mode 100644
index 0000000..b98649b
--- /dev/null
+++ b/tools/json_schema_compiler/PRESUBMIT.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for changes affecting tools/json_schema_compiler/
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into gcl.
+"""
+
+WHITELIST = [ r'.+_test.py$' ]
+
+def CheckChangeOnUpload(input_api, output_api):
+ return input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api, '.', whitelist=WHITELIST)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api, '.', whitelist=WHITELIST)
diff --git a/tools/json_schema_compiler/api_gen_util.gyp b/tools/json_schema_compiler/api_gen_util.gyp
new file mode 100644
index 0000000..54966cc
--- /dev/null
+++ b/tools/json_schema_compiler/api_gen_util.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'chromium_code': 1,
+ },
+ 'targets': [{
+ 'target_name': 'api_gen_util',
+ 'type': 'static_library',
+ 'sources': [
+ 'util.cc',
+ ],
+ 'dependencies': ['<(DEPTH)/base/base.gyp:base'],
+ 'include_dirs': [
+ '<(DEPTH)',
+ ],
+ }],
+}
diff --git a/tools/json_schema_compiler/cc_generator.py b/tools/json_schema_compiler/cc_generator.py
new file mode 100644
index 0000000..1d243f0
--- /dev/null
+++ b/tools/json_schema_compiler/cc_generator.py
@@ -0,0 +1,1070 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from code import Code
+from model import PropertyType
+import cpp_util
+import schema_util
+import util_cc_helper
+from cpp_namespace_environment import CppNamespaceEnvironment
+
+class CCGenerator(object):
+ def __init__(self, type_generator):
+ self._type_generator = type_generator
+
+ def Generate(self, namespace):
+ return _Generator(namespace, self._type_generator).Generate()
+
+
+class _Generator(object):
+ """A .cc generator for a namespace.
+ """
+ def __init__(self, namespace, cpp_type_generator):
+ assert type(namespace.environment) is CppNamespaceEnvironment
+ self._namespace = namespace
+ self._type_helper = cpp_type_generator
+ self._util_cc_helper = (
+ util_cc_helper.UtilCCHelper(self._type_helper))
+ self._generate_error_messages = namespace.compiler_options.get(
+ 'generate_error_messages', False)
+
+ def Generate(self):
+ """Generates a Code object with the .cc for a single namespace.
+ """
+ cpp_namespace = cpp_util.GetCppNamespace(
+ self._namespace.environment.namespace_pattern,
+ self._namespace.unix_name)
+
+ c = Code()
+ (c.Append(cpp_util.CHROMIUM_LICENSE)
+ .Append()
+ .Append(cpp_util.GENERATED_FILE_MESSAGE % self._namespace.source_file)
+ .Append()
+ .Append(self._util_cc_helper.GetIncludePath())
+ .Append('#include "base/logging.h"')
+ .Append('#include "base/strings/string_number_conversions.h"')
+ .Append('#include "base/strings/utf_string_conversions.h"')
+ .Append('#include "%s/%s.h"' %
+ (self._namespace.source_file_dir, self._namespace.short_filename))
+ .Append('#include <set>')
+ .Cblock(self._type_helper.GenerateIncludes(include_soft=True))
+ .Append()
+ .Append('using base::UTF8ToUTF16;')
+ .Append()
+ .Concat(cpp_util.OpenNamespace(cpp_namespace))
+ )
+ if self._namespace.properties:
+ (c.Append('//')
+ .Append('// Properties')
+ .Append('//')
+ .Append()
+ )
+ for property in self._namespace.properties.values():
+ property_code = self._type_helper.GeneratePropertyValues(
+ property,
+ 'const %(type)s %(name)s = %(value)s;',
+ nodoc=True)
+ if property_code:
+ c.Cblock(property_code)
+ if self._namespace.types:
+ (c.Append('//')
+ .Append('// Types')
+ .Append('//')
+ .Append()
+ .Cblock(self._GenerateTypes(None, self._namespace.types.values()))
+ )
+ if self._namespace.functions:
+ (c.Append('//')
+ .Append('// Functions')
+ .Append('//')
+ .Append()
+ )
+ for function in self._namespace.functions.values():
+ c.Cblock(self._GenerateFunction(function))
+ if self._namespace.events:
+ (c.Append('//')
+ .Append('// Events')
+ .Append('//')
+ .Append()
+ )
+ for event in self._namespace.events.values():
+ c.Cblock(self._GenerateEvent(event))
+ c.Cblock(cpp_util.CloseNamespace(cpp_namespace))
+ c.Append()
+ return c
+
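+  # Illustrative sketch (not part of the original file, names assumed for the
+  # example): for a namespace "alarms" compiled with a namespace pattern such
+  # as "extensions::api::%(namespace)s", Generate() renders roughly:
+  #   <Chromium license + generated-file banner>
+  #   #include "<source_file_dir>/alarms.h"   (plus base/ and util includes)
+  #   namespace extensions { namespace api { namespace alarms {
+  #   // Properties ...  // Types ...  // Functions ...  // Events ...
+  #   <closing namespaces>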
+ def _GenerateType(self, cpp_namespace, type_):
+ """Generates the function definitions for a type.
+ """
+ classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
+ c = Code()
+
+ if type_.functions:
+ # Wrap functions within types in the type's namespace.
+ (c.Append('namespace %s {' % classname)
+ .Append())
+ for function in type_.functions.values():
+ c.Cblock(self._GenerateFunction(function))
+ c.Append('} // namespace %s' % classname)
+ elif type_.property_type == PropertyType.ARRAY:
+ c.Cblock(self._GenerateType(cpp_namespace, type_.item_type))
+ elif type_.property_type in (PropertyType.CHOICES,
+ PropertyType.OBJECT):
+ if cpp_namespace is None:
+ classname_in_namespace = classname
+ else:
+ classname_in_namespace = '%s::%s' % (cpp_namespace, classname)
+
+ if type_.property_type == PropertyType.OBJECT:
+ c.Cblock(self._GeneratePropertyFunctions(classname_in_namespace,
+ type_.properties.values()))
+ else:
+ c.Cblock(self._GenerateTypes(classname_in_namespace, type_.choices))
+
+ (c.Append('%s::%s()' % (classname_in_namespace, classname))
+ .Cblock(self._GenerateInitializersAndBody(type_))
+ .Append('%s::~%s() {}' % (classname_in_namespace, classname))
+ .Append()
+ )
+ if type_.origin.from_json:
+ c.Cblock(self._GenerateTypePopulate(classname_in_namespace, type_))
+ if cpp_namespace is None: # only generate for top-level types
+ c.Cblock(self._GenerateTypeFromValue(classname_in_namespace, type_))
+ if type_.origin.from_client:
+ c.Cblock(self._GenerateTypeToValue(classname_in_namespace, type_))
+ elif type_.property_type == PropertyType.ENUM:
+ (c.Cblock(self._GenerateEnumToString(cpp_namespace, type_))
+ .Cblock(self._GenerateEnumFromString(cpp_namespace, type_))
+ )
+
+ return c
+
+ def _GenerateInitializersAndBody(self, type_):
+ items = []
+ for prop in type_.properties.values():
+ t = prop.type_
+
+ real_t = self._type_helper.FollowRef(t)
+ if real_t.property_type == PropertyType.ENUM:
+ items.append('%s(%s)' % (
+ prop.unix_name,
+ self._type_helper.GetEnumNoneValue(t)))
+ elif prop.optional:
+ continue
+ elif t.property_type == PropertyType.INTEGER:
+ items.append('%s(0)' % prop.unix_name)
+ elif t.property_type == PropertyType.DOUBLE:
+ items.append('%s(0.0)' % prop.unix_name)
+ elif t.property_type == PropertyType.BOOLEAN:
+ items.append('%s(false)' % prop.unix_name)
+ elif (t.property_type == PropertyType.ANY or
+ t.property_type == PropertyType.ARRAY or
+ t.property_type == PropertyType.BINARY or # mapped to std::string
+ t.property_type == PropertyType.CHOICES or
+ t.property_type == PropertyType.OBJECT or
+ t.property_type == PropertyType.FUNCTION or
+ t.property_type == PropertyType.REF or
+ t.property_type == PropertyType.STRING):
+ # TODO(miket): It would be nice to initialize CHOICES, but we
+ # don't presently have the semantics to indicate which one of a set
+ # should be the default.
+ continue
+ else:
+ raise TypeError(t)
+
+ if items:
+ s = ': %s' % (', '.join(items))
+ else:
+ s = ''
+ s = s + ' {}'
+ return Code().Append(s)
+
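+  # For example (illustrative, hypothetical property names): a type with a
+  # required integer "id", a required boolean "done" and an optional enum
+  # "state" yields an initializer list along the lines of
+  #   : id(0), done(false), state(STATE_NONE) {}
+  # where the NONE constant comes from GetEnumNoneValue().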
+ def _GenerateTypePopulate(self, cpp_namespace, type_):
+ """Generates the function for populating a type given a pointer to it.
+
+  E.g. for type "Foo", generates Foo::Populate()
+ """
+ classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
+ c = Code()
+ (c.Append('// static')
+ .Append('bool %(namespace)s::Populate(')
+ .Sblock(' %s) {' % self._GenerateParams(
+ ('const base::Value& value', '%(name)s* out'))))
+
+ if self._generate_error_messages:
+ c.Append('DCHECK(error);')
+
+ if type_.property_type == PropertyType.CHOICES:
+ for choice in type_.choices:
+ (c.Sblock('if (%s) {' % self._GenerateValueIsTypeExpression('value',
+ choice))
+ .Concat(self._GeneratePopulateVariableFromValue(
+ choice,
+ '(&value)',
+ 'out->as_%s' % choice.unix_name,
+ 'false',
+ is_ptr=True))
+ .Append('return true;')
+ .Eblock('}')
+ )
+ (c.Concat(self._GenerateError(
+ '"expected %s, got " + %s' %
+ (" or ".join(choice.name for choice in type_.choices),
+ self._util_cc_helper.GetValueTypeString('value'))))
+ .Append('return false;'))
+ elif type_.property_type == PropertyType.OBJECT:
+ (c.Sblock('if (!value.IsType(base::Value::TYPE_DICTIONARY)) {')
+ .Concat(self._GenerateError(
+ '"expected dictionary, got " + ' +
+ self._util_cc_helper.GetValueTypeString('value')))
+ .Append('return false;')
+ .Eblock('}'))
+
+ if type_.properties or type_.additional_properties is not None:
+ c.Append('const base::DictionaryValue* dict = '
+ 'static_cast<const base::DictionaryValue*>(&value);')
+ if self._generate_error_messages:
+ c.Append('std::set<std::string> keys;')
+ for prop in type_.properties.itervalues():
+ c.Concat(self._InitializePropertyToDefault(prop, 'out'))
+ for prop in type_.properties.itervalues():
+ if self._generate_error_messages:
+ c.Append('keys.insert("%s");' % (prop.name))
+ c.Concat(self._GenerateTypePopulateProperty(prop, 'dict', 'out'))
+ # Check for extra values.
+ if self._generate_error_messages:
+ (c.Sblock('for (base::DictionaryValue::Iterator it(*dict); '
+ '!it.IsAtEnd(); it.Advance()) {')
+ .Sblock('if (!keys.count(it.key())) {')
+ .Concat(self._GenerateError('"found unexpected key \'" + '
+ 'it.key() + "\'"'))
+ .Eblock('}')
+ .Eblock('}')
+ )
+ if type_.additional_properties is not None:
+ if type_.additional_properties.property_type == PropertyType.ANY:
+ c.Append('out->additional_properties.MergeDictionary(dict);')
+ else:
+ cpp_type = self._type_helper.GetCppType(type_.additional_properties,
+ is_in_container=True)
+ (c.Append('for (base::DictionaryValue::Iterator it(*dict);')
+ .Sblock(' !it.IsAtEnd(); it.Advance()) {')
+ .Append('%s tmp;' % cpp_type)
+ .Concat(self._GeneratePopulateVariableFromValue(
+ type_.additional_properties,
+ '(&it.value())',
+ 'tmp',
+ 'false'))
+ .Append('out->additional_properties[it.key()] = tmp;')
+ .Eblock('}')
+ )
+ c.Append('return true;')
+ (c.Eblock('}')
+ .Substitute({'namespace': cpp_namespace, 'name': classname}))
+ return c
+
+ def _GenerateValueIsTypeExpression(self, var, type_):
+ real_type = self._type_helper.FollowRef(type_)
+ if real_type.property_type is PropertyType.CHOICES:
+ return '(%s)' % ' || '.join(self._GenerateValueIsTypeExpression(var,
+ choice)
+ for choice in real_type.choices)
+ return '%s.IsType(%s)' % (var, cpp_util.GetValueType(real_type))
+
+ def _GenerateTypePopulateProperty(self, prop, src, dst):
+ """Generate the code to populate a single property in a type.
+
+ src: base::DictionaryValue*
+ dst: Type*
+ """
+ c = Code()
+ value_var = prop.unix_name + '_value'
+ c.Append('const base::Value* %(value_var)s = NULL;')
+ if prop.optional:
+ (c.Sblock(
+ 'if (%(src)s->GetWithoutPathExpansion("%(key)s", &%(value_var)s)) {')
+ .Concat(self._GeneratePopulatePropertyFromValue(
+ prop, value_var, dst, 'false')))
+ underlying_type = self._type_helper.FollowRef(prop.type_)
+ if underlying_type.property_type == PropertyType.ENUM:
+ (c.Append('} else {')
+ .Append('%%(dst)s->%%(name)s = %s;' %
+ self._type_helper.GetEnumNoneValue(prop.type_)))
+ c.Eblock('}')
+ else:
+ (c.Sblock(
+ 'if (!%(src)s->GetWithoutPathExpansion("%(key)s", &%(value_var)s)) {')
+ .Concat(self._GenerateError('"\'%%(key)s\' is required"'))
+ .Append('return false;')
+ .Eblock('}')
+ .Concat(self._GeneratePopulatePropertyFromValue(
+ prop, value_var, dst, 'false'))
+ )
+ c.Append()
+ c.Substitute({
+ 'value_var': value_var,
+ 'key': prop.name,
+ 'src': src,
+ 'dst': dst,
+ 'name': prop.unix_name
+ })
+ return c
+
+ def _GenerateTypeFromValue(self, cpp_namespace, type_):
+ classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
+ c = Code()
+ (c.Append('// static')
+ .Append('scoped_ptr<%s> %s::FromValue(%s) {' % (classname,
+ cpp_namespace, self._GenerateParams(('const base::Value& value',))))
+ )
+ if self._generate_error_messages:
+ c.Append('DCHECK(error);')
+ (c.Append(' scoped_ptr<%s> out(new %s());' % (classname, classname))
+ .Append(' if (!Populate(%s))' % self._GenerateArgs(
+ ('value', 'out.get()')))
+ .Append(' return scoped_ptr<%s>();' % classname)
+ .Append(' return out.Pass();')
+ .Append('}')
+ )
+ return c
+
+ def _GenerateTypeToValue(self, cpp_namespace, type_):
+ """Generates a function that serializes the type into a base::Value.
+ E.g. for type "Foo" generates Foo::ToValue()
+ """
+ if type_.property_type == PropertyType.OBJECT:
+ return self._GenerateObjectTypeToValue(cpp_namespace, type_)
+ elif type_.property_type == PropertyType.CHOICES:
+ return self._GenerateChoiceTypeToValue(cpp_namespace, type_)
+ else:
+ raise ValueError("Unsupported property type %s" % type_.type_)
+
+ def _GenerateObjectTypeToValue(self, cpp_namespace, type_):
+ """Generates a function that serializes an object-representing type
+ into a base::DictionaryValue.
+ """
+ c = Code()
+ (c.Sblock('scoped_ptr<base::DictionaryValue> %s::ToValue() const {' %
+ cpp_namespace)
+ .Append('scoped_ptr<base::DictionaryValue> value('
+ 'new base::DictionaryValue());')
+ .Append()
+ )
+
+ for prop in type_.properties.values():
+ prop_var = 'this->%s' % prop.unix_name
+ if prop.optional:
+ # Optional enum values are generated with a NONE enum value.
+ underlying_type = self._type_helper.FollowRef(prop.type_)
+ if underlying_type.property_type == PropertyType.ENUM:
+ c.Sblock('if (%s != %s) {' %
+ (prop_var,
+ self._type_helper.GetEnumNoneValue(prop.type_)))
+ else:
+ c.Sblock('if (%s.get()) {' % prop_var)
+
+ # ANY is a base::Value which is abstract and cannot be a direct member, so
+ # it will always be a pointer.
+ is_ptr = prop.optional or prop.type_.property_type == PropertyType.ANY
+ c.Cblock(self._CreateValueFromType(
+ 'value->SetWithoutPathExpansion("%s", %%s);' % prop.name,
+ prop.name,
+ prop.type_,
+ prop_var,
+ is_ptr=is_ptr))
+
+ if prop.optional:
+ c.Eblock('}')
+
+ if type_.additional_properties is not None:
+ if type_.additional_properties.property_type == PropertyType.ANY:
+ c.Append('value->MergeDictionary(&additional_properties);')
+ else:
+ # Non-copyable types will be wrapped in a linked_ptr for inclusion in
+ # maps, so we need to unwrap them.
+ needs_unwrap = (
+ not self._type_helper.IsCopyable(type_.additional_properties))
+ cpp_type = self._type_helper.GetCppType(type_.additional_properties,
+ is_in_container=True)
+ (c.Sblock('for (std::map<std::string, %s>::const_iterator it =' %
+ cpp_util.PadForGenerics(cpp_type))
+ .Append(' additional_properties.begin();')
+ .Append(' it != additional_properties.end(); ++it) {')
+ .Cblock(self._CreateValueFromType(
+ 'value->SetWithoutPathExpansion(it->first, %s);',
+ type_.additional_properties.name,
+ type_.additional_properties,
+ '%sit->second' % ('*' if needs_unwrap else '')))
+ .Eblock('}')
+ )
+
+ return (c.Append()
+ .Append('return value.Pass();')
+ .Eblock('}'))
+
+ def _GenerateChoiceTypeToValue(self, cpp_namespace, type_):
+ """Generates a function that serializes a choice-representing type
+ into a base::Value.
+ """
+ c = Code()
+ c.Sblock('scoped_ptr<base::Value> %s::ToValue() const {' % cpp_namespace)
+ c.Append('scoped_ptr<base::Value> result;')
+ for choice in type_.choices:
+ choice_var = 'as_%s' % choice.unix_name
+ (c.Sblock('if (%s) {' % choice_var)
+ .Append('DCHECK(!result) << "Cannot set multiple choices for %s";' %
+ type_.unix_name)
+ .Cblock(self._CreateValueFromType('result.reset(%s);',
+ choice.name,
+ choice,
+ '*%s' % choice_var))
+ .Eblock('}')
+ )
+ (c.Append('DCHECK(result) << "Must set at least one choice for %s";' %
+ type_.unix_name)
+ .Append('return result.Pass();')
+ .Eblock('}')
+ )
+ return c
+
+ def _GenerateFunction(self, function):
+ """Generates the definitions for function structs.
+ """
+ c = Code()
+
+ # TODO(kalman): use function.unix_name not Classname.
+ function_namespace = cpp_util.Classname(function.name)
+ # Windows has a #define for SendMessage, so to avoid any issues, we need
+ # to not use the name.
+ if function_namespace == 'SendMessage':
+ function_namespace = 'PassMessage'
+ (c.Append('namespace %s {' % function_namespace)
+ .Append()
+ )
+
+ # Params::Populate function
+ if function.params:
+ c.Concat(self._GeneratePropertyFunctions('Params', function.params))
+ (c.Append('Params::Params() {}')
+ .Append('Params::~Params() {}')
+ .Append()
+ .Cblock(self._GenerateFunctionParamsCreate(function))
+ )
+
+ # Results::Create function
+ if function.callback:
+ c.Concat(self._GenerateCreateCallbackArguments(function_namespace,
+ 'Results',
+ function.callback))
+
+ c.Append('} // namespace %s' % function_namespace)
+ return c
+
+ def _GenerateEvent(self, event):
+ # TODO(kalman): use event.unix_name not Classname.
+ c = Code()
+ event_namespace = cpp_util.Classname(event.name)
+ (c.Append('namespace %s {' % event_namespace)
+ .Append()
+ .Cblock(self._GenerateEventNameConstant(None, event))
+ .Cblock(self._GenerateCreateCallbackArguments(event_namespace,
+ None,
+ event))
+ .Append('} // namespace %s' % event_namespace)
+ )
+ return c
+
+ def _CreateValueFromType(self, code, prop_name, type_, var, is_ptr=False):
+ """Creates a base::Value given a type. Generated code passes ownership
+ to caller.
+
+ var: variable or variable*
+
+  E.g. for std::string, generate new base::StringValue(var)
+ """
+ c = Code()
+ underlying_type = self._type_helper.FollowRef(type_)
+ if underlying_type.property_type == PropertyType.ARRAY:
+ # Enums are treated specially because C++ templating thinks that they're
+ # ints, but really they're strings. So we create a vector of strings and
+ # populate it with the names of the enum in the array. The |ToString|
+ # function of the enum can be in another namespace when the enum is
+ # referenced. Templates can not be used here because C++ templating does
+ # not support passing a namespace as an argument.
+ item_type = self._type_helper.FollowRef(underlying_type.item_type)
+ if item_type.property_type == PropertyType.ENUM:
+ vardot = '(%s)%s' % (var, '->' if is_ptr else '.')
+
+ maybe_namespace = ''
+ if type_.item_type.property_type == PropertyType.REF:
+ maybe_namespace = '%s::' % item_type.namespace.unix_name
+
+ enum_list_var = '%s_list' % prop_name
+ # Scope the std::vector variable declaration inside braces.
+ (c.Sblock('{')
+ .Append('std::vector<std::string> %s;' % enum_list_var)
+ .Append('for (std::vector<%s>::const_iterator it = %sbegin();'
+ % (self._type_helper.GetCppType(item_type), vardot))
+ .Sblock(' it != %send(); ++it) {' % vardot)
+ .Append('%s.push_back(%sToString(*it));' % (enum_list_var,
+ maybe_namespace))
+ .Eblock('}'))
+
+ # Because the std::vector above is always created for both required and
+ # optional enum arrays, |is_ptr| is set to false and uses the
+ # std::vector to create the values.
+ (c.Append(code %
+ self._GenerateCreateValueFromType(type_, enum_list_var, False))
+ .Eblock('}'))
+ return c
+
+ c.Append(code % self._GenerateCreateValueFromType(type_, var, is_ptr))
+ return c
+
+ def _GenerateCreateValueFromType(self, type_, var, is_ptr):
+ """Generates the statement to create a base::Value given a type.
+
+ type_: The type of the values being converted.
+ var: The name of the variable.
+ is_ptr: Whether |type_| is optional.
+ """
+ underlying_type = self._type_helper.FollowRef(type_)
+ if (underlying_type.property_type == PropertyType.CHOICES or
+ underlying_type.property_type == PropertyType.OBJECT):
+ if is_ptr:
+ return '(%s)->ToValue().release()' % var
+ else:
+ return '(%s).ToValue().release()' % var
+ elif (underlying_type.property_type == PropertyType.ANY or
+ underlying_type.property_type == PropertyType.FUNCTION):
+ if is_ptr:
+ vardot = '(%s)->' % var
+ else:
+ vardot = '(%s).' % var
+ return '%sDeepCopy()' % vardot
+ elif underlying_type.property_type == PropertyType.ENUM:
+ maybe_namespace = ''
+ if type_.property_type == PropertyType.REF:
+ maybe_namespace = '%s::' % underlying_type.namespace.unix_name
+ return 'new base::StringValue(%sToString(%s))' % (maybe_namespace, var)
+ elif underlying_type.property_type == PropertyType.BINARY:
+ if is_ptr:
+ vardot = var + '->'
+ else:
+ vardot = var + '.'
+ return ('base::BinaryValue::CreateWithCopiedBuffer(%sdata(), %ssize())' %
+ (vardot, vardot))
+ elif underlying_type.property_type == PropertyType.ARRAY:
+ return '%s.release()' % self._util_cc_helper.CreateValueFromArray(
+ var,
+ is_ptr)
+ elif underlying_type.property_type.is_fundamental:
+ if is_ptr:
+ var = '*%s' % var
+ if underlying_type.property_type == PropertyType.STRING:
+ return 'new base::StringValue(%s)' % var
+ else:
+ return 'new base::FundamentalValue(%s)' % var
+ else:
+ raise NotImplementedError('Conversion of %s to base::Value not '
+ 'implemented' % repr(type_.type_))
+
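+  # Illustrative summary of the C++ expressions produced above:
+  #   OBJECT / CHOICES  : (var).ToValue().release()
+  #   ANY / FUNCTION    : (var).DeepCopy()
+  #   ENUM              : new base::StringValue(ToString(var))
+  #   BINARY            : base::BinaryValue::CreateWithCopiedBuffer(var.data(), var.size())
+  #   ARRAY             : the array expression from util_cc_helper, .release()'d
+  #   STRING            : new base::StringValue(var)
+  #   other fundamentals: new base::FundamentalValue(var)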
+ def _GenerateParamsCheck(self, function, var):
+ """Generates a check for the correct number of arguments when creating
+ Params.
+ """
+ c = Code()
+ num_required = 0
+ for param in function.params:
+ if not param.optional:
+ num_required += 1
+ if num_required == len(function.params):
+ c.Sblock('if (%(var)s.GetSize() != %(total)d) {')
+ elif not num_required:
+ c.Sblock('if (%(var)s.GetSize() > %(total)d) {')
+ else:
+ c.Sblock('if (%(var)s.GetSize() < %(required)d'
+ ' || %(var)s.GetSize() > %(total)d) {')
+ (c.Concat(self._GenerateError(
+ '"expected %%(total)d arguments, got " '
+ '+ base::IntToString(%%(var)s.GetSize())'))
+ .Append('return scoped_ptr<Params>();')
+ .Eblock('}')
+ .Substitute({
+ 'var': var,
+ 'required': num_required,
+ 'total': len(function.params),
+ }))
+ return c
+
+ def _GenerateFunctionParamsCreate(self, function):
+ """Generate function to create an instance of Params. The generated
+ function takes a base::ListValue of arguments.
+
+ E.g for function "Bar", generate Bar::Params::Create()
+ """
+ c = Code()
+ (c.Append('// static')
+ .Sblock('scoped_ptr<Params> Params::Create(%s) {' % self._GenerateParams(
+ ['const base::ListValue& args']))
+ )
+ if self._generate_error_messages:
+ c.Append('DCHECK(error);')
+ (c.Concat(self._GenerateParamsCheck(function, 'args'))
+ .Append('scoped_ptr<Params> params(new Params());')
+ )
+
+ for param in function.params:
+ c.Concat(self._InitializePropertyToDefault(param, 'params'))
+
+ for i, param in enumerate(function.params):
+ # Any failure will cause this function to return. If any argument is
+ # incorrect or missing, those following it are not processed. Note that
+ # for optional arguments, we allow missing arguments and proceed because
+ # there may be other arguments following it.
+ failure_value = 'scoped_ptr<Params>()'
+ c.Append()
+ value_var = param.unix_name + '_value'
+ (c.Append('const base::Value* %(value_var)s = NULL;')
+ .Append('if (args.Get(%(i)s, &%(value_var)s) &&')
+ .Sblock(' !%(value_var)s->IsType(base::Value::TYPE_NULL)) {')
+ .Concat(self._GeneratePopulatePropertyFromValue(
+ param, value_var, 'params', failure_value))
+ .Eblock('}')
+ )
+ if not param.optional:
+ (c.Sblock('else {')
+ .Concat(self._GenerateError('"\'%%(key)s\' is required"'))
+ .Append('return %s;' % failure_value)
+ .Eblock('}'))
+ c.Substitute({'value_var': value_var, 'i': i, 'key': param.name})
+ (c.Append()
+ .Append('return params.Pass();')
+ .Eblock('}')
+ .Append()
+ )
+
+ return c
+
+ def _GeneratePopulatePropertyFromValue(self,
+ prop,
+ src_var,
+ dst_class_var,
+ failure_value):
+ """Generates code to populate property |prop| of |dst_class_var| (a
+ pointer) from a Value*. See |_GeneratePopulateVariableFromValue| for
+ semantics.
+ """
+ return self._GeneratePopulateVariableFromValue(prop.type_,
+ src_var,
+ '%s->%s' % (dst_class_var,
+ prop.unix_name),
+ failure_value,
+ is_ptr=prop.optional)
+
+ def _GeneratePopulateVariableFromValue(self,
+ type_,
+ src_var,
+ dst_var,
+ failure_value,
+ is_ptr=False):
+ """Generates code to populate a variable |dst_var| of type |type_| from a
+ Value* at |src_var|. The Value* is assumed to be non-NULL. In the generated
+ code, if |dst_var| fails to be populated then Populate will return
+ |failure_value|.
+ """
+ c = Code()
+
+ underlying_type = self._type_helper.FollowRef(type_)
+
+ if underlying_type.property_type.is_fundamental:
+ if is_ptr:
+ (c.Append('%(cpp_type)s temp;')
+ .Sblock('if (!%s) {' % cpp_util.GetAsFundamentalValue(
+ self._type_helper.FollowRef(type_), src_var, '&temp'))
+ .Concat(self._GenerateError(
+ '"\'%%(key)s\': expected ' + '%s, got " + %s' % (
+ type_.name,
+ self._util_cc_helper.GetValueTypeString(
+ '%%(src_var)s', True)))))
+ c.Append('%(dst_var)s.reset();')
+ if not self._generate_error_messages:
+ c.Append('return %(failure_value)s;')
+ (c.Eblock('}')
+ .Append('else')
+ .Append(' %(dst_var)s.reset(new %(cpp_type)s(temp));')
+ )
+ else:
+ (c.Sblock('if (!%s) {' % cpp_util.GetAsFundamentalValue(
+ self._type_helper.FollowRef(type_),
+ src_var,
+ '&%s' % dst_var))
+ .Concat(self._GenerateError(
+ '"\'%%(key)s\': expected ' + '%s, got " + %s' % (
+ type_.name,
+ self._util_cc_helper.GetValueTypeString(
+ '%%(src_var)s', True))))
+ .Append('return %(failure_value)s;')
+ .Eblock('}')
+ )
+ elif underlying_type.property_type == PropertyType.OBJECT:
+ if is_ptr:
+ (c.Append('const base::DictionaryValue* dictionary = NULL;')
+ .Sblock('if (!%(src_var)s->GetAsDictionary(&dictionary)) {')
+ .Concat(self._GenerateError(
+ '"\'%%(key)s\': expected dictionary, got " + ' +
+ self._util_cc_helper.GetValueTypeString('%%(src_var)s', True))))
+ # If an optional property fails to populate, the population can still
+ # succeed with a warning. If no error messages are generated, this
+ # warning is not set and we fail out instead.
+ if not self._generate_error_messages:
+ c.Append('return %(failure_value)s;')
+ (c.Eblock('}')
+ .Sblock('else {')
+ .Append('scoped_ptr<%(cpp_type)s> temp(new %(cpp_type)s());')
+ .Append('if (!%%(cpp_type)s::Populate(%s)) {' % self._GenerateArgs(
+ ('*dictionary', 'temp.get()')))
+ .Append(' return %(failure_value)s;')
+ )
+ (c.Append('}')
+ .Append('else')
+ .Append(' %(dst_var)s = temp.Pass();')
+ .Eblock('}')
+ )
+ else:
+ (c.Append('const base::DictionaryValue* dictionary = NULL;')
+ .Sblock('if (!%(src_var)s->GetAsDictionary(&dictionary)) {')
+ .Concat(self._GenerateError(
+ '"\'%%(key)s\': expected dictionary, got " + ' +
+ self._util_cc_helper.GetValueTypeString('%%(src_var)s', True)))
+ .Append('return %(failure_value)s;')
+ .Eblock('}')
+ .Append('if (!%%(cpp_type)s::Populate(%s)) {' % self._GenerateArgs(
+ ('*dictionary', '&%(dst_var)s')))
+ .Append(' return %(failure_value)s;')
+ .Append('}')
+ )
+ elif underlying_type.property_type == PropertyType.FUNCTION:
+ if is_ptr:
+ c.Append('%(dst_var)s.reset(new base::DictionaryValue());')
+ elif underlying_type.property_type == PropertyType.ANY:
+ c.Append('%(dst_var)s.reset(%(src_var)s->DeepCopy());')
+ elif underlying_type.property_type == PropertyType.ARRAY:
+ # util_cc_helper deals with optional and required arrays
+ (c.Append('const base::ListValue* list = NULL;')
+ .Sblock('if (!%(src_var)s->GetAsList(&list)) {')
+ .Concat(self._GenerateError(
+ '"\'%%(key)s\': expected list, got " + ' +
+ self._util_cc_helper.GetValueTypeString('%%(src_var)s', True)))
+ )
+ if is_ptr and self._generate_error_messages:
+ c.Append('%(dst_var)s.reset();')
+ else:
+ c.Append('return %(failure_value)s;')
+ c.Eblock('}')
+ c.Sblock('else {')
+ item_type = self._type_helper.FollowRef(underlying_type.item_type)
+ if item_type.property_type == PropertyType.ENUM:
+ c.Concat(self._GenerateListValueToEnumArrayConversion(
+ item_type,
+ 'list',
+ dst_var,
+ failure_value,
+ is_ptr=is_ptr))
+ else:
+ (c.Sblock('if (!%s) {' % self._util_cc_helper.PopulateArrayFromList(
+ 'list',
+ dst_var,
+ is_ptr)))
+ c.Concat(self._GenerateError(
+ '"unable to populate array \'%%(parent_key)s\'"'))
+ if is_ptr and self._generate_error_messages:
+ c.Append('%(dst_var)s.reset();')
+ else:
+ c.Append('return %(failure_value)s;')
+ c.Eblock('}')
+ c.Eblock('}')
+ elif underlying_type.property_type == PropertyType.CHOICES:
+ if is_ptr:
+ (c.Append('scoped_ptr<%(cpp_type)s> temp(new %(cpp_type)s());')
+ .Append('if (!%%(cpp_type)s::Populate(%s))' % self._GenerateArgs(
+ ('*%(src_var)s', 'temp.get()')))
+ .Append(' return %(failure_value)s;')
+ .Append('%(dst_var)s = temp.Pass();')
+ )
+ else:
+ (c.Append('if (!%%(cpp_type)s::Populate(%s))' % self._GenerateArgs(
+ ('*%(src_var)s', '&%(dst_var)s')))
+ .Append(' return %(failure_value)s;'))
+ elif underlying_type.property_type == PropertyType.ENUM:
+ c.Concat(self._GenerateStringToEnumConversion(underlying_type,
+ src_var,
+ dst_var,
+ failure_value))
+ elif underlying_type.property_type == PropertyType.BINARY:
+ (c.Append('const base::BinaryValue* binary_value = NULL;')
+ .Sblock('if (!%(src_var)s->IsType(base::Value::TYPE_BINARY)) {')
+ .Concat(self._GenerateError(
+ '"\'%%(key)s\': expected binary, got " + ' +
+ self._util_cc_helper.GetValueTypeString('%%(src_var)s', True)))
+ )
+ if not self._generate_error_messages:
+ c.Append('return %(failure_value)s;')
+ (c.Eblock('}')
+ .Sblock('else {')
+ .Append(' binary_value =')
+ .Append(' static_cast<const base::BinaryValue*>(%(src_var)s);')
+ )
+ if is_ptr:
+ (c.Append('%(dst_var)s.reset(')
+ .Append(' new std::string(binary_value->GetBuffer(),')
+ .Append(' binary_value->GetSize()));')
+ )
+ else:
+ (c.Append('%(dst_var)s.assign(binary_value->GetBuffer(),')
+ .Append(' binary_value->GetSize());')
+ )
+ c.Eblock('}')
+ else:
+ raise NotImplementedError(type_)
+ if c.IsEmpty():
+ return c
+ return Code().Sblock('{').Concat(c.Substitute({
+ 'cpp_type': self._type_helper.GetCppType(type_),
+ 'src_var': src_var,
+ 'dst_var': dst_var,
+ 'failure_value': failure_value,
+ 'key': type_.name,
+ 'parent_key': type_.parent.name,
+ })).Eblock('}')
+
+ def _GenerateListValueToEnumArrayConversion(self,
+ item_type,
+ src_var,
+ dst_var,
+ failure_value,
+ is_ptr=False):
+ """Returns Code that converts a ListValue of string constants from
+    |src_var| into an array of enums of |item_type| in |dst_var|. On failure,
+ returns |failure_value|.
+ """
+ c = Code()
+ accessor = '.'
+ if is_ptr:
+ accessor = '->'
+ cpp_type = self._type_helper.GetCppType(item_type, is_in_container=True)
+ c.Append('%s.reset(new std::vector<%s>);' %
+ (dst_var, cpp_util.PadForGenerics(cpp_type)))
+ (c.Sblock('for (base::ListValue::const_iterator it = %s->begin(); '
+ 'it != %s->end(); ++it) {' % (src_var, src_var))
+ .Append('%s tmp;' % self._type_helper.GetCppType(item_type))
+ .Concat(self._GenerateStringToEnumConversion(item_type,
+ '(*it)',
+ 'tmp',
+ failure_value))
+ .Append('%s%spush_back(tmp);' % (dst_var, accessor))
+ .Eblock('}')
+ )
+ return c
+
+ def _GenerateStringToEnumConversion(self,
+ type_,
+ src_var,
+ dst_var,
+ failure_value):
+ """Returns Code that converts a string type in |src_var| to an enum with
+ type |type_| in |dst_var|. In the generated code, if |src_var| is not
+ a valid enum name then the function will return |failure_value|.
+ """
+ if type_.property_type != PropertyType.ENUM:
+ raise TypeError(type_)
+ c = Code()
+ enum_as_string = '%s_as_string' % type_.unix_name
+ cpp_type_namespace = ''
+ if type_.namespace != self._namespace:
+ cpp_type_namespace = '%s::' % type_.namespace.unix_name
+ cpp_type_name = self._type_helper.GetCppType(type_)
+ (c.Append('std::string %s;' % enum_as_string)
+ .Sblock('if (!%s->GetAsString(&%s)) {' % (src_var, enum_as_string))
+ .Concat(self._GenerateError(
+ '"\'%%(key)s\': expected string, got " + ' +
+ self._util_cc_helper.GetValueTypeString('%%(src_var)s', True)))
+ .Append('return %s;' % failure_value)
+ .Eblock('}')
+ .Append('%s = %sParse%s(%s);' % (dst_var,
+ cpp_type_namespace,
+ cpp_util.Classname(type_.name),
+ enum_as_string))
+ .Sblock('if (%s == %s%s) {' % (dst_var,
+ cpp_type_namespace,
+ self._type_helper.GetEnumNoneValue(type_)))
+ .Concat(self._GenerateError(
+ '\"\'%%(key)s\': expected \\"' +
+ '\\" or \\"'.join(
+ enum_value.name
+ for enum_value in self._type_helper.FollowRef(type_).enum_values) +
+ '\\", got \\"" + %s + "\\""' % enum_as_string))
+ .Append('return %s;' % failure_value)
+ .Eblock('}')
+ .Substitute({'src_var': src_var, 'key': type_.name})
+ )
+ return c
+
+ def _GeneratePropertyFunctions(self, namespace, params):
+ """Generates the member functions for a list of parameters.
+ """
+ return self._GenerateTypes(namespace, (param.type_ for param in params))
+
+ def _GenerateTypes(self, namespace, types):
+ """Generates the member functions for a list of types.
+ """
+ c = Code()
+ for type_ in types:
+ c.Cblock(self._GenerateType(namespace, type_))
+ return c
+
+ def _GenerateEnumToString(self, cpp_namespace, type_):
+ """Generates ToString() which gets the string representation of an enum.
+ """
+ c = Code()
+ classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
+
+ if cpp_namespace is not None:
+ c.Append('// static')
+ maybe_namespace = '' if cpp_namespace is None else '%s::' % cpp_namespace
+
+ c.Sblock('std::string %sToString(%s enum_param) {' %
+ (maybe_namespace, classname))
+ c.Sblock('switch (enum_param) {')
+ for enum_value in self._type_helper.FollowRef(type_).enum_values:
+ name = enum_value.name
+ if 'camel_case_enum_to_string' in self._namespace.compiler_options:
+ name = enum_value.CamelName()
+ (c.Append('case %s: ' % self._type_helper.GetEnumValue(type_, enum_value))
+ .Append(' return "%s";' % name))
+ (c.Append('case %s:' % self._type_helper.GetEnumNoneValue(type_))
+ .Append(' return "";')
+ .Eblock('}')
+ .Append('NOTREACHED();')
+ .Append('return "";')
+ .Eblock('}')
+ )
+ return c
+
+ def _GenerateEnumFromString(self, cpp_namespace, type_):
+ """Generates FromClassNameString() which gets an enum from its string
+ representation.
+ """
+ c = Code()
+ classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
+
+ if cpp_namespace is not None:
+ c.Append('// static')
+ maybe_namespace = '' if cpp_namespace is None else '%s::' % cpp_namespace
+
+ c.Sblock('%s%s %sParse%s(const std::string& enum_string) {' %
+ (maybe_namespace, classname, maybe_namespace, classname))
+ for i, enum_value in enumerate(
+ self._type_helper.FollowRef(type_).enum_values):
+ # This is broken up into all ifs with no else ifs because we get
+ # "fatal error C1061: compiler limit : blocks nested too deeply"
+ # on Windows.
+ (c.Append('if (enum_string == "%s")' % enum_value.name)
+ .Append(' return %s;' %
+ self._type_helper.GetEnumValue(type_, enum_value)))
+ (c.Append('return %s;' % self._type_helper.GetEnumNoneValue(type_))
+ .Eblock('}')
+ )
+ return c
+
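+  # Illustrative output shape (type and value names assumed for the example):
+  # for an enum type "State" this emits roughly
+  #   State ParseState(const std::string& enum_string) {
+  #     if (enum_string == "active")
+  #       return <enum value constant>;
+  #     ...
+  #     return <NONE constant>;
+  #   }
+  # using one plain "if" per value to avoid the deeply nested else-if chains
+  # that break the Windows compiler (see the comment below).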
+ def _GenerateCreateCallbackArguments(self,
+ cpp_namespace,
+ function_scope,
+ callback):
+ """Generate all functions to create Value parameters for a callback.
+
+    E.g. for function "Bar", generate Bar::Results::Create
+    E.g. for event "Baz", generate Baz::Create
+
+ function_scope: the function scope path, e.g. Foo::Bar for the function
+ Foo::Bar::Baz(). May be None if there is no function scope.
+ callback: the Function object we are creating callback arguments for.
+ """
+ c = Code()
+ params = callback.params
+ c.Concat(self._GeneratePropertyFunctions(function_scope, params))
+
+ (c.Sblock('scoped_ptr<base::ListValue> %(function_scope)s'
+ 'Create(%(declaration_list)s) {')
+ .Append('scoped_ptr<base::ListValue> create_results('
+ 'new base::ListValue());')
+ )
+ declaration_list = []
+ for param in params:
+ declaration_list.append(cpp_util.GetParameterDeclaration(
+ param, self._type_helper.GetCppType(param.type_)))
+ c.Cblock(self._CreateValueFromType('create_results->Append(%s);',
+ param.name,
+ param.type_,
+ param.unix_name))
+ c.Append('return create_results.Pass();')
+ c.Eblock('}')
+ c.Substitute({
+ 'function_scope': ('%s::' % function_scope) if function_scope else '',
+ 'declaration_list': ', '.join(declaration_list),
+ 'param_names': ', '.join(param.unix_name for param in params)
+ })
+ return c
+
+ def _GenerateEventNameConstant(self, function_scope, event):
+ """Generates a constant string array for the event name.
+ """
+ c = Code()
+ c.Append('const char kEventName[] = "%s.%s";' % (
+ self._namespace.name, event.name))
+ return c
+
+ def _InitializePropertyToDefault(self, prop, dst):
+ """Initialize a model.Property to its default value inside an object.
+
+    E.g. for optional enum "state", generate dst->state = STATE_NONE;
+
+ dst: Type*
+ """
+ c = Code()
+ underlying_type = self._type_helper.FollowRef(prop.type_)
+ if (underlying_type.property_type == PropertyType.ENUM and
+ prop.optional):
+ c.Append('%s->%s = %s;' % (
+ dst,
+ prop.unix_name,
+ self._type_helper.GetEnumNoneValue(prop.type_)))
+ return c
+
+ def _GenerateError(self, body):
+ """Generates an error message pertaining to population failure.
+
+    E.g. 'expected bool, got int'
+ """
+ c = Code()
+ if not self._generate_error_messages:
+ return c
+ (c.Append('if (error->length())')
+ .Append(' error->append(UTF8ToUTF16("; "));')
+ .Append('error->append(UTF8ToUTF16(%s));' % body))
+ return c
+
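+  # When error messages are enabled this expands to (illustrative):
+  #   if (error->length())
+  #     error->append(UTF8ToUTF16("; "));
+  #   error->append(UTF8ToUTF16("expected bool, got int"));
+  # so multiple population failures accumulate into one semicolon-separated
+  # string.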
+ def _GenerateParams(self, params):
+ """Builds the parameter list for a function, given an array of parameters.
+ """
+ if self._generate_error_messages:
+ params = list(params) + ['base::string16* error']
+ return ', '.join(str(p) for p in params)
+
+ def _GenerateArgs(self, args):
+ """Builds the argument list for a function, given an array of arguments.
+ """
+ if self._generate_error_messages:
+ args = list(args) + ['error']
+ return ', '.join(str(a) for a in args)
diff --git a/tools/json_schema_compiler/code.py b/tools/json_schema_compiler/code.py
new file mode 100644
index 0000000..8ce6afa
--- /dev/null
+++ b/tools/json_schema_compiler/code.py
@@ -0,0 +1,142 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class Code(object):
+ """A convenience object for constructing code.
+
+ Logically each object should be a block of code. All methods except |Render|
+ and |IsEmpty| return self.
+ """
+ def __init__(self, indent_size=2, comment_length=80):
+ self._code = []
+ self._indent_level = 0
+ self._indent_size = indent_size
+ self._comment_length = comment_length
+
+ def Append(self, line='', substitute=True, indent_level=None):
+ """Appends a line of code at the current indent level or just a newline if
+ line is not specified. Trailing whitespace is stripped.
+
+    substitute: indicates whether this line should be affected by
+ code.Substitute().
+ """
+ if indent_level is None:
+ indent_level = self._indent_level
+ self._code.append(Line(((' ' * indent_level) + line).rstrip(),
+ substitute=substitute))
+ return self
+
+ def IsEmpty(self):
+ """Returns True if the Code object is empty.
+ """
+ return not bool(self._code)
+
+ def Concat(self, obj):
+ """Concatenate another Code object onto this one. Trailing whitespace is
+ stripped.
+
+ Appends the code at the current indent level. Will fail if there are any
+    un-interpolated format specifiers, e.g. %s or %(something)s, which helps
+    isolate any strings that haven't been substituted.
+ """
+ if not isinstance(obj, Code):
+ raise TypeError(type(obj))
+ assert self is not obj
+ for line in obj._code:
+ try:
+ # line % () will fail if any substitution tokens are left in line
+ if line.substitute:
+ line.value %= ()
+ except TypeError:
+ raise TypeError('Unsubstituted value when concatting\n' + line.value)
+ except ValueError:
+ raise ValueError('Stray % character when concatting\n' + line.value)
+ self.Append(line.value, line.substitute)
+
+ return self
+
+ def Cblock(self, code):
+ """Concatenates another Code object |code| onto this one followed by a
+ blank line, if |code| is non-empty."""
+ if not code.IsEmpty():
+ self.Concat(code).Append()
+ return self
+
+ def Sblock(self, line=None):
+ """Starts a code block.
+
+ Appends a line of code and then increases the indent level.
+ """
+ if line is not None:
+ self.Append(line)
+ self._indent_level += self._indent_size
+ return self
+
+ def Eblock(self, line=None):
+ """Ends a code block by decreasing and then appending a line (or a blank
+ line if not given).
+ """
+ # TODO(calamity): Decide if type checking is necessary
+ #if not isinstance(line, basestring):
+ # raise TypeError
+ self._indent_level -= self._indent_size
+ if line is not None:
+ self.Append(line)
+ return self
+
+ def Comment(self, comment, comment_prefix='// '):
+ """Adds the given string as a comment.
+
+ Will split the comment if it's too long. Use mainly for variable length
+ comments. Otherwise just use code.Append('// ...') for comments.
+
+ Unaffected by code.Substitute().
+ """
+ max_len = self._comment_length - self._indent_level - len(comment_prefix)
+ while len(comment) >= max_len:
+ line = comment[0:max_len]
+ last_space = line.rfind(' ')
+ if last_space != -1:
+ line = line[0:last_space]
+ comment = comment[last_space + 1:]
+ else:
+ comment = comment[max_len:]
+ self.Append(comment_prefix + line, substitute=False)
+ self.Append(comment_prefix + comment, substitute=False)
+ return self
+
+ def Substitute(self, d):
+ """Goes through each line and interpolates using the given dict.
+
+    Raises a TypeError if passed something that isn't a dict.
+
+ Use for long pieces of code using interpolation with the same variables
+ repeatedly. This will reduce code and allow for named placeholders which
+ are more clear.
+ """
+ if not isinstance(d, dict):
+ raise TypeError('Passed argument is not a dictionary: ' + d)
+ for i, line in enumerate(self._code):
+ if self._code[i].substitute:
+ # Only need to check %s because arg is a dict and python will allow
+ # '%s %(named)s' but just about nothing else
+ if '%s' in self._code[i].value or '%r' in self._code[i].value:
+ raise TypeError('"%s" or "%r" found in substitution. '
+ 'Named arguments only. Use "%" to escape')
+ self._code[i].value = line.value % d
+ self._code[i].substitute = False
+ return self
+
+ def Render(self):
+ """Renders Code as a string.
+ """
+ return '\n'.join([l.value for l in self._code])
+
+
+class Line(object):
+ """A line of code.
+ """
+ def __init__(self, value, substitute=True):
+ self.value = value
+ self.substitute = substitute
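+
+# Minimal usage sketch (illustrative, not part of the original file), showing
+# Sblock/Eblock indentation together with named substitution:
+#
+#   c = Code()
+#   (c.Sblock('if (%(cond)s) {')
+#     .Append('return true;')
+#     .Eblock('}'))
+#   c.Substitute({'cond': 'enabled'})
+#   print c.Render()
+#
+# renders:
+#   if (enabled) {
+#     return true;
+#   }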
diff --git a/tools/json_schema_compiler/code_test.py b/tools/json_schema_compiler/code_test.py
new file mode 100755
index 0000000..ca36524
--- /dev/null
+++ b/tools/json_schema_compiler/code_test.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from code import Code
+import unittest
+
+class CodeTest(unittest.TestCase):
+ def testAppend(self):
+ c = Code()
+ c.Append('line')
+ self.assertEquals('line', c.Render())
+
+ def testBlock(self):
+ c = Code()
+ (c.Append('line')
+ .Sblock('sblock')
+ .Append('inner')
+ .Append('moreinner')
+ .Sblock('moresblock')
+ .Append('inner')
+ .Eblock('out')
+ .Append('inner')
+ .Eblock('out')
+ )
+ self.assertEquals(
+ 'line\n'
+ 'sblock\n'
+ ' inner\n'
+ ' moreinner\n'
+ ' moresblock\n'
+ ' inner\n'
+ ' out\n'
+ ' inner\n'
+ 'out',
+ c.Render())
+
+ def testConcat(self):
+ b = Code()
+ (b.Sblock('2')
+ .Append('2')
+ .Eblock('2')
+ )
+ c = Code()
+ (c.Sblock('1')
+ .Concat(b)
+ .Append('1')
+ .Eblock('1')
+ )
+ self.assertEquals(
+ '1\n'
+ ' 2\n'
+ ' 2\n'
+ ' 2\n'
+ ' 1\n'
+ '1',
+ c.Render())
+ d = Code()
+ a = Code()
+ a.Concat(d)
+ self.assertEquals('', a.Render())
+ a.Concat(c)
+ self.assertEquals(
+ '1\n'
+ ' 2\n'
+ ' 2\n'
+ ' 2\n'
+ ' 1\n'
+ '1',
+ a.Render())
+
+ def testConcatErrors(self):
+ c = Code()
+ d = Code()
+ d.Append('%s')
+ self.assertRaises(TypeError, c.Concat, d)
+ d = Code()
+ d.Append('%(classname)s')
+ self.assertRaises(TypeError, c.Concat, d)
+ d = 'line of code'
+ self.assertRaises(TypeError, c.Concat, d)
+
+ def testSubstitute(self):
+ c = Code()
+ c.Append('%(var1)s %(var2)s %(var1)s')
+ c.Substitute({'var1': 'one', 'var2': 'two'})
+ self.assertEquals('one two one', c.Render())
+ c.Append('%(var1)s %(var2)s %(var3)s')
+ c.Append('%(var2)s %(var1)s %(var3)s')
+ c.Substitute({'var1': 'one', 'var2': 'two', 'var3': 'three'})
+ self.assertEquals(
+ 'one two one\n'
+ 'one two three\n'
+ 'two one three',
+ c.Render())
+
+ def testSubstituteErrors(self):
+ # No unnamed placeholders allowed when substitute is run
+ c = Code()
+ c.Append('%s %s')
+ self.assertRaises(TypeError, c.Substitute, ('var1', 'one'))
+ c = Code()
+ c.Append('%s %(var1)s')
+ self.assertRaises(TypeError, c.Substitute, {'var1': 'one'})
+ c = Code()
+ c.Append('%s %(var1)s')
+ self.assertRaises(TypeError, c.Substitute, {'var1': 'one'})
+ c = Code()
+ c.Append('%(var1)s')
+ self.assertRaises(KeyError, c.Substitute, {'clearlynotvar1': 'one'})
+
+ def testIsEmpty(self):
+ c = Code()
+ self.assertTrue(c.IsEmpty())
+ c.Append('asdf')
+ self.assertFalse(c.IsEmpty())
+
+ def testComment(self):
+ long_comment = ('This comment is eighty nine characters in longness, '
+ 'that is, to use another word, length')
+ c = Code()
+ c.Comment(long_comment)
+ self.assertEquals(
+ '// This comment is eighty nine characters '
+ 'in longness, that is, to use another\n'
+ '// word, length',
+ c.Render())
+ c = Code()
+ c.Sblock('sblock')
+ c.Comment(long_comment)
+ c.Eblock('eblock')
+ c.Comment(long_comment)
+ self.assertEquals(
+ 'sblock\n'
+ ' // This comment is eighty nine characters '
+ 'in longness, that is, to use\n'
+ ' // another word, length\n'
+ 'eblock\n'
+ '// This comment is eighty nine characters in '
+ 'longness, that is, to use another\n'
+ '// word, length',
+ c.Render())
+ long_word = 'x' * 100
+ c = Code()
+ c.Comment(long_word)
+ self.assertEquals(
+ '// ' + 'x' * 77 + '\n'
+ '// ' + 'x' * 23,
+ c.Render())
+
+ def testCommentWithSpecialCharacters(self):
+ c = Code()
+ c.Comment('20% of 80%s')
+ c.Substitute({})
+ self.assertEquals('// 20% of 80%s', c.Render())
+ d = Code()
+ d.Append('90')
+ d.Concat(c)
+ self.assertEquals('90\n'
+ '// 20% of 80%s',
+ d.Render())
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/json_schema_compiler/compiler.py b/tools/json_schema_compiler/compiler.py
new file mode 100755
index 0000000..7a2e4dd
--- /dev/null
+++ b/tools/json_schema_compiler/compiler.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generator for C++ structs from api json files.
+
+The purpose of this tool is to remove the need for hand-written code that
+converts to and from base::Value types when receiving JavaScript API calls.
+Originally written for generating code for extension APIs. Reference schemas
+are in chrome/common/extensions/api.
+
+Usage example:
+ compiler.py --root /home/Work/src --namespace extensions windows.json
+ tabs.json
+ compiler.py --destdir gen --root /home/Work/src
+ --namespace extensions windows.json tabs.json
+"""
+
+import optparse
+import os
+import shlex
+import sys
+
+from cpp_bundle_generator import CppBundleGenerator
+from cpp_generator import CppGenerator
+from cpp_type_generator import CppTypeGenerator
+from dart_generator import DartGenerator
+import json_schema
+from cpp_namespace_environment import CppNamespaceEnvironment
+from model import Model
+from schema_loader import SchemaLoader
+
+# Names of supported code generators, as specified on the command-line.
+# First is default.
+GENERATORS = ['cpp', 'cpp-bundle-registration', 'cpp-bundle-schema', 'dart']
+
+def GenerateSchema(generator_name,
+ file_paths,
+ root,
+ destdir,
+ cpp_namespace_pattern,
+ dart_overrides_dir,
+ impl_dir,
+ include_rules):
+ # Merge the source files into a single list of schemas.
+ api_defs = []
+ for file_path in file_paths:
+ schema = os.path.relpath(file_path, root)
+ schema_loader = SchemaLoader(
+ root,
+ os.path.dirname(schema),
+ include_rules,
+ cpp_namespace_pattern)
+ api_def = schema_loader.LoadSchema(schema)
+
+ # If compiling the C++ model code, delete 'nocompile' nodes.
+ if generator_name == 'cpp':
+ api_def = json_schema.DeleteNodes(api_def, 'nocompile')
+ api_defs.extend(api_def)
+
+ api_model = Model()
+
+ # For single-schema compilation make sure that the first (i.e. only) schema
+ # is the default one.
+ default_namespace = None
+
+ # If we have files from multiple source paths, we'll use the common parent
+ # path as the source directory.
+ src_path = None
+
+ # Load the actual namespaces into the model.
+ for target_namespace, file_path in zip(api_defs, file_paths):
+ relpath = os.path.relpath(os.path.normpath(file_path), root)
+ namespace = api_model.AddNamespace(target_namespace,
+ relpath,
+ include_compiler_options=True,
+ environment=CppNamespaceEnvironment(
+ cpp_namespace_pattern))
+
+ if default_namespace is None:
+ default_namespace = namespace
+
+ if src_path is None:
+ src_path = namespace.source_file_dir
+ else:
+ src_path = os.path.commonprefix((src_path, namespace.source_file_dir))
+
+ path, filename = os.path.split(file_path)
+ filename_base, _ = os.path.splitext(filename)
+
+ # Construct the type generator with all the namespaces in this model.
+ type_generator = CppTypeGenerator(api_model,
+ schema_loader,
+ default_namespace)
+ if generator_name in ('cpp-bundle-registration', 'cpp-bundle-schema'):
+ cpp_bundle_generator = CppBundleGenerator(root,
+ api_model,
+ api_defs,
+ type_generator,
+ cpp_namespace_pattern,
+ src_path,
+ impl_dir)
+ if generator_name == 'cpp-bundle-registration':
+ generators = [
+ ('generated_api_registration.cc',
+ cpp_bundle_generator.api_cc_generator),
+ ('generated_api_registration.h', cpp_bundle_generator.api_h_generator),
+ ]
+ elif generator_name == 'cpp-bundle-schema':
+ generators = [
+ ('generated_schemas.cc', cpp_bundle_generator.schemas_cc_generator),
+ ('generated_schemas.h', cpp_bundle_generator.schemas_h_generator)
+ ]
+ elif generator_name == 'cpp':
+ cpp_generator = CppGenerator(type_generator)
+ generators = [
+ ('%s.h' % filename_base, cpp_generator.h_generator),
+ ('%s.cc' % filename_base, cpp_generator.cc_generator)
+ ]
+ elif generator_name == 'dart':
+ generators = [
+ ('%s.dart' % namespace.unix_name, DartGenerator(
+ dart_overrides_dir))
+ ]
+ else:
+    raise Exception('Unrecognised generator %s' % generator_name)
+
+ output_code = []
+ for filename, generator in generators:
+ code = generator.Generate(namespace).Render()
+ if destdir:
+ if generator_name == 'cpp-bundle-registration':
+ # Function registrations must be output to impl_dir, since they link in
+ # API implementations.
+ output_dir = os.path.join(destdir, impl_dir)
+ else:
+ output_dir = os.path.join(destdir, src_path)
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ with open(os.path.join(output_dir, filename), 'w') as f:
+ f.write(code)
+ output_code += [filename, '', code, '']
+
+ return '\n'.join(output_code)
+
+
+if __name__ == '__main__':
+ parser = optparse.OptionParser(
+ description='Generates a C++ model of an API from JSON schema',
+ usage='usage: %prog [option]... schema')
+ parser.add_option('-r', '--root', default='.',
+ help='logical include root directory. Path to schema files from specified'
+ ' dir will be the include path.')
+ parser.add_option('-d', '--destdir',
+ help='root directory to output generated files.')
+ parser.add_option('-n', '--namespace', default='generated_api_schemas',
+    help='C++ namespace for generated files. e.g. extensions::api.')
+ parser.add_option('-g', '--generator', default=GENERATORS[0],
+ choices=GENERATORS,
+ help='The generator to use to build the output code. Supported values are'
+ ' %s' % GENERATORS)
+ parser.add_option('-D', '--dart-overrides-dir', dest='dart_overrides_dir',
+ help='Adds custom dart from files in the given directory (Dart only).')
+ parser.add_option('-i', '--impl-dir', dest='impl_dir',
+ help='The root path of all API implementations')
+ parser.add_option('-I', '--include-rules',
+ help='A list of paths to include when searching for referenced objects,'
+ ' with the namespace separated by a \':\'. Example: '
+ '/foo/bar:Foo::Bar::%(namespace)s')
+
+ (opts, file_paths) = parser.parse_args()
+
+ if not file_paths:
+ sys.exit(0) # This is OK as a no-op
+
+ # Unless in bundle mode, only one file should be specified.
+ if (opts.generator not in ('cpp-bundle-registration', 'cpp-bundle-schema') and
+ len(file_paths) > 1):
+ # TODO(sashab): Could also just use file_paths[0] here and not complain.
+ raise Exception(
+ "Unless in bundle mode, only one file can be specified at a time.")
+
+ def split_path_and_namespace(path_and_namespace):
+ if ':' not in path_and_namespace:
+ raise ValueError('Invalid include rule "%s". Rules must be of '
+ 'the form path:namespace' % path_and_namespace)
+ return path_and_namespace.split(':', 1)
+
+ include_rules = []
+ if opts.include_rules:
+ include_rules = map(split_path_and_namespace,
+ shlex.split(opts.include_rules))
+
+ result = GenerateSchema(opts.generator, file_paths, opts.root, opts.destdir,
+ opts.namespace, opts.dart_overrides_dir,
+ opts.impl_dir, include_rules)
+ if not opts.destdir:
+ print result
diff --git a/tools/json_schema_compiler/cpp_bundle_generator.py b/tools/json_schema_compiler/cpp_bundle_generator.py
new file mode 100644
index 0000000..0a4f9a6
--- /dev/null
+++ b/tools/json_schema_compiler/cpp_bundle_generator.py
@@ -0,0 +1,322 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import code
+import cpp_util
+from model import Platforms
+from schema_util import CapitalizeFirstLetter
+from schema_util import JsFunctionNameToClassName
+
+import json
+import os
+import re
+
+
+def _RemoveDescriptions(node):
+ """Returns a copy of |schema| with "description" fields removed.
+ """
+ if isinstance(node, dict):
+ result = {}
+ for key, value in node.items():
+ # Some schemas actually have properties called "description", so only
+ # remove descriptions that have string values.
+ if key == 'description' and isinstance(value, basestring):
+ continue
+ result[key] = _RemoveDescriptions(value)
+ return result
+ if isinstance(node, list):
+ return [_RemoveDescriptions(v) for v in node]
+ return node
+
+
+class CppBundleGenerator(object):
+ """This class contains methods to generate code based on multiple schemas.
+ """
+
+ def __init__(self,
+ root,
+ model,
+ api_defs,
+ cpp_type_generator,
+ cpp_namespace_pattern,
+ source_file_dir,
+ impl_dir):
+ self._root = root
+ self._model = model
+ self._api_defs = api_defs
+ self._cpp_type_generator = cpp_type_generator
+ self._source_file_dir = source_file_dir
+ self._impl_dir = impl_dir
+
+ # Hack: assume that the C++ namespace for the bundle is the namespace of the
+ # files without the last component of the namespace. A cleaner way to do
+ # this would be to make it a separate variable in the gyp file.
+ self._cpp_namespace = cpp_namespace_pattern.rsplit('::', 1)[0]
+
+ self.api_cc_generator = _APICCGenerator(self)
+ self.api_h_generator = _APIHGenerator(self)
+ self.schemas_cc_generator = _SchemasCCGenerator(self)
+ self.schemas_h_generator = _SchemasHGenerator(self)
+
+ def _GenerateHeader(self, file_base, body_code):
+ """Generates a code.Code object for a header file
+
+ Parameters:
+ - |file_base| - the base of the filename, e.g. 'foo' (for 'foo.h')
+ - |body_code| - the code to put in between the multiple inclusion guards"""
+ c = code.Code()
+ c.Append(cpp_util.CHROMIUM_LICENSE)
+ c.Append()
+ c.Append(cpp_util.GENERATED_BUNDLE_FILE_MESSAGE % self._source_file_dir)
+ ifndef_name = cpp_util.GenerateIfndefName(
+ '%s/%s.h' % (self._source_file_dir, file_base))
+ c.Append()
+ c.Append('#ifndef %s' % ifndef_name)
+ c.Append('#define %s' % ifndef_name)
+ c.Append()
+ c.Concat(body_code)
+ c.Append()
+ c.Append('#endif // %s' % ifndef_name)
+ c.Append()
+ return c
+
+ def _GetPlatformIfdefs(self, model_object):
+ """Generates the "defined" conditional for an #if check if |model_object|
+ has platform restrictions. Returns None if there are no restrictions.
+ """
+ if model_object.platforms is None:
+ return None
+ ifdefs = []
+ for platform in model_object.platforms:
+ if platform == Platforms.CHROMEOS:
+ ifdefs.append('defined(OS_CHROMEOS)')
+ elif platform == Platforms.LINUX:
+ ifdefs.append('defined(OS_LINUX)')
+ elif platform == Platforms.MAC:
+ ifdefs.append('defined(OS_MACOSX)')
+ elif platform == Platforms.WIN:
+ ifdefs.append('defined(OS_WIN)')
+ else:
+ raise ValueError("Unsupported platform ifdef: %s" % platform.name)
+ return ' || '.join(ifdefs)
+
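+  # For example (illustrative): a schema restricted to [chromeos, win]
+  # produces the condition "defined(OS_CHROMEOS) || defined(OS_WIN)", which
+  # the callers below wrap in "#if ... / #endif".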
+ def _GenerateRegisterFunctions(self, namespace_name, function):
+ c = code.Code()
+ function_ifdefs = self._GetPlatformIfdefs(function)
+ if function_ifdefs is not None:
+ c.Append("#if %s" % function_ifdefs, indent_level=0)
+
+ function_name = JsFunctionNameToClassName(namespace_name, function.name)
+ c.Append("registry->RegisterFunction<%sFunction>();" % (
+ function_name))
+
+ if function_ifdefs is not None:
+ c.Append("#endif // %s" % function_ifdefs, indent_level=0)
+ return c
+
+ def _GenerateFunctionRegistryRegisterAll(self):
+ c = code.Code()
+ c.Append('// static')
+ c.Sblock('void GeneratedFunctionRegistry::RegisterAll('
+ 'ExtensionFunctionRegistry* registry) {')
+ for namespace in self._model.namespaces.values():
+ namespace_ifdefs = self._GetPlatformIfdefs(namespace)
+ if namespace_ifdefs is not None:
+ c.Append("#if %s" % namespace_ifdefs, indent_level=0)
+
+ namespace_name = CapitalizeFirstLetter(namespace.name.replace(
+ "experimental.", ""))
+ for function in namespace.functions.values():
+ if function.nocompile:
+ continue
+ c.Concat(self._GenerateRegisterFunctions(namespace.name, function))
+
+ for type_ in namespace.types.values():
+ for function in type_.functions.values():
+ if function.nocompile:
+ continue
+ namespace_types_name = JsFunctionNameToClassName(
+ namespace.name, type_.name)
+ c.Concat(self._GenerateRegisterFunctions(namespace_types_name,
+ function))
+
+ if namespace_ifdefs is not None:
+ c.Append("#endif // %s" % namespace_ifdefs, indent_level=0)
+ c.Eblock("}")
+ return c
+
+
+class _APIHGenerator(object):
+ """Generates the header for API registration / declaration"""
+ def __init__(self, cpp_bundle):
+ self._bundle = cpp_bundle
+
+ def Generate(self, _): # namespace not relevant, this is a bundle
+ c = code.Code()
+
+ c.Append('#include <string>')
+ c.Append()
+ c.Append('#include "base/basictypes.h"')
+ c.Append()
+ c.Append("class ExtensionFunctionRegistry;")
+ c.Append()
+ c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace))
+ c.Append()
+ c.Append('class GeneratedFunctionRegistry {')
+ c.Sblock(' public:')
+ c.Append('static void RegisterAll('
+ 'ExtensionFunctionRegistry* registry);')
+ c.Eblock('};')
+ c.Append()
+ c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace))
+ return self._bundle._GenerateHeader('generated_api', c)
+
+
+class _APICCGenerator(object):
+ """Generates a code.Code object for the generated API .cc file"""
+
+ def __init__(self, cpp_bundle):
+ self._bundle = cpp_bundle
+
+ def Generate(self, _): # namespace not relevant, this is a bundle
+ c = code.Code()
+ c.Append(cpp_util.CHROMIUM_LICENSE)
+ c.Append()
+ c.Append('#include "%s"' % (
+ os.path.join(self._bundle._impl_dir,
+ 'generated_api_registration.h')))
+ c.Append()
+ for namespace in self._bundle._model.namespaces.values():
+ namespace_name = namespace.unix_name.replace("experimental_", "")
+ implementation_header = namespace.compiler_options.get(
+ "implemented_in",
+ "%s/%s/%s_api.h" % (self._bundle._impl_dir,
+ namespace_name,
+ namespace_name))
+ if not os.path.exists(
+ os.path.join(self._bundle._root,
+ os.path.normpath(implementation_header))):
+ if "implemented_in" in namespace.compiler_options:
+ raise ValueError('Header file for namespace "%s" specified in '
+ 'compiler_options not found: %s' %
+ (namespace.unix_name, implementation_header))
+ continue
+ ifdefs = self._bundle._GetPlatformIfdefs(namespace)
+ if ifdefs is not None:
+ c.Append("#if %s" % ifdefs, indent_level=0)
+
+ c.Append('#include "%s"' % implementation_header)
+
+ if ifdefs is not None:
+ c.Append("#endif // %s" % ifdefs, indent_level=0)
+ c.Append()
+ c.Append('#include '
+ '"extensions/browser/extension_function_registry.h"')
+ c.Append()
+ c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace))
+ c.Append()
+ c.Concat(self._bundle._GenerateFunctionRegistryRegisterAll())
+ c.Append()
+ c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace))
+ c.Append()
+ return c
+
+
+class _SchemasHGenerator(object):
+ """Generates a code.Code object for the generated schemas .h file"""
+ def __init__(self, cpp_bundle):
+ self._bundle = cpp_bundle
+
+ def Generate(self, _): # namespace not relevant, this is a bundle
+ c = code.Code()
+ c.Append('#include <map>')
+ c.Append('#include <string>')
+ c.Append()
+ c.Append('#include "base/strings/string_piece.h"')
+ c.Append()
+ c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace))
+ c.Append()
+ c.Append('class GeneratedSchemas {')
+ c.Sblock(' public:')
+ c.Append('// Determines if schema named |name| is generated.')
+ c.Append('static bool IsGenerated(std::string name);')
+ c.Append()
+ c.Append('// Gets the API schema named |name|.')
+ c.Append('static base::StringPiece Get(const std::string& name);')
+ c.Eblock('};')
+ c.Append()
+ c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace))
+ return self._bundle._GenerateHeader('generated_schemas', c)
+
+
+def _FormatNameAsConstant(name):
+ """Formats a name to be a C++ constant of the form kConstantName"""
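+  # Illustrative examples of the transformation performed below:
+  #   'alarms'              -> 'kAlarms'
+  #   'experimental.fooBar' -> 'kExperimentalFooBar'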
+ name = '%s%s' % (name[0].upper(), name[1:])
+ return 'k%s' % re.sub('_[a-z]',
+ lambda m: m.group(0)[1].upper(),
+ name.replace('.', '_'))
+
+
+class _SchemasCCGenerator(object):
+ """Generates a code.Code object for the generated schemas .cc file"""
+
+ def __init__(self, cpp_bundle):
+ self._bundle = cpp_bundle
+
+ def Generate(self, _): # namespace not relevant, this is a bundle
+ c = code.Code()
+ c.Append(cpp_util.CHROMIUM_LICENSE)
+ c.Append()
+ c.Append('#include "%s"' % (os.path.join(self._bundle._source_file_dir,
+ 'generated_schemas.h')))
+ c.Append()
+ c.Append('#include "base/lazy_instance.h"')
+ c.Append()
+ c.Append('namespace {')
+ for api in self._bundle._api_defs:
+ namespace = self._bundle._model.namespaces[api.get('namespace')]
+ # JSON parsing code expects lists of schemas, so dump a singleton list.
+ json_content = json.dumps([_RemoveDescriptions(api)],
+ separators=(',', ':'))
+      # For this to output a valid JSON C string, escape all backslashes and
+      # double-quotes. Note that some schemas are too large to compile on
+      # Windows, so split the JSON into several adjacent string literals,
+      # which the compiler concatenates.
+ max_length = 8192
+ segments = [json_content[i:i + max_length].replace('\\', '\\\\')
+ .replace('"', '\\"')
+ for i in xrange(0, len(json_content), max_length)]
+ c.Append('const char %s[] = "%s";' %
+ (_FormatNameAsConstant(namespace.name), '" "'.join(segments)))
+ c.Append('}')
+ c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace))
+ c.Append()
+ c.Sblock('struct Static {')
+ c.Sblock('Static() {')
+ for api in self._bundle._api_defs:
+ namespace = self._bundle._model.namespaces[api.get('namespace')]
+ c.Append('schemas["%s"] = %s;' % (namespace.name,
+ _FormatNameAsConstant(namespace.name)))
+ c.Eblock('}')
+ c.Append()
+ c.Append('std::map<std::string, const char*> schemas;')
+ c.Eblock('};')
+ c.Append()
+ c.Append('base::LazyInstance<Static> g_lazy_instance;')
+ c.Append()
+ c.Append('// static')
+ c.Sblock('base::StringPiece GeneratedSchemas::Get('
+ 'const std::string& name) {')
+ c.Append('return IsGenerated(name) ? '
+ 'g_lazy_instance.Get().schemas[name] : "";')
+ c.Eblock('}')
+ c.Append()
+ c.Append('// static')
+ c.Sblock('bool GeneratedSchemas::IsGenerated(std::string name) {')
+ c.Append('return g_lazy_instance.Get().schemas.count(name) > 0;')
+ c.Eblock('}')
+ c.Append()
+ c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace))
+ c.Append()
+ return c
diff --git a/tools/json_schema_compiler/cpp_generator.py b/tools/json_schema_compiler/cpp_generator.py
new file mode 100644
index 0000000..5521ea9
--- /dev/null
+++ b/tools/json_schema_compiler/cpp_generator.py
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from cc_generator import CCGenerator
+from h_generator import HGenerator
+
+class CppGenerator(object):
+ def __init__(self, type_generator):
+ self.h_generator = HGenerator(type_generator)
+ self.cc_generator = CCGenerator(type_generator)
diff --git a/tools/json_schema_compiler/cpp_namespace_environment.py b/tools/json_schema_compiler/cpp_namespace_environment.py
new file mode 100644
index 0000000..20e77bb
--- /dev/null
+++ b/tools/json_schema_compiler/cpp_namespace_environment.py
@@ -0,0 +1,7 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class CppNamespaceEnvironment(object):
+ def __init__(self, namespace_pattern):
+ self.namespace_pattern = namespace_pattern
diff --git a/tools/json_schema_compiler/cpp_type_generator.py b/tools/json_schema_compiler/cpp_type_generator.py
new file mode 100644
index 0000000..6bec67e
--- /dev/null
+++ b/tools/json_schema_compiler/cpp_type_generator.py
@@ -0,0 +1,273 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from code import Code
+from model import PropertyType
+import cpp_util
+from json_parse import OrderedDict
+import schema_util
+
+class _TypeDependency(object):
+ """Contains information about a dependency a namespace has on a type: the
+ type's model, and whether that dependency is "hard" meaning that it cannot be
+ forward declared.
+ """
+ def __init__(self, type_, hard=False):
+ self.type_ = type_
+ self.hard = hard
+
+ def GetSortKey(self):
+ return '%s.%s' % (self.type_.namespace.name, self.type_.name)
+
+
+class CppTypeGenerator(object):
+ """Manages the types of properties and provides utilities for getting the
+ C++ type out of a model.Property
+ """
+ def __init__(self, model, schema_loader, default_namespace=None):
+    """Creates a cpp_type_generator. The generator will generate code suitable
+    for use in the given model's namespace. If |default_namespace| is None, the
+    first namespace in |model| is used as the default.
+ """
+ self._default_namespace = default_namespace
+ if self._default_namespace is None:
+ self._default_namespace = model.namespaces.values()[0]
+ self._schema_loader = schema_loader
+
+ def GetEnumNoneValue(self, type_):
+ """Gets the enum value in the given model.Property indicating no value has
+ been set.
+ """
+ return '%s_NONE' % self.FollowRef(type_).unix_name.upper()
+
+ def GetEnumLastValue(self, type_):
+ """Gets the enum value in the given model.Property indicating the last value
+ for the type.
+ """
+ return '%s_LAST' % self.FollowRef(type_).unix_name.upper()
+
+ def GetEnumValue(self, type_, enum_value):
+ """Gets the enum value of the given model.Property of the given type.
+
+    e.g. VAR_STRING
+ """
+ value = cpp_util.Classname(enum_value.name.upper())
+ prefix = (type_.cpp_enum_prefix_override or
+ self.FollowRef(type_).unix_name)
+ value = '%s_%s' % (prefix.upper(), value)
+    # To avoid collisions with built-in OS_* preprocessor definitions, we add a
+    # trailing underscore to enum names that start with OS_.
+ if value.startswith("OS_"):
+ value += "_"
+ return value
+
+ def GetCppType(self, type_, is_ptr=False, is_in_container=False):
+ """Translates a model.Property or model.Type into its C++ type.
+
+ If REF types from different namespaces are referenced, will resolve
+ using self._schema_loader.
+
+ Use |is_ptr| if the type is optional. This will wrap the type in a
+ scoped_ptr if possible (it is not possible to wrap an enum).
+
+ Use |is_in_container| if the type is appearing in a collection, e.g. a
+ std::vector or std::map. This will wrap it in the correct type with spacing.
+ """
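+    # Illustrative results (for types from a typical namespace):
+    #   an optional string with is_ptr=True        -> 'scoped_ptr<std::string>'
+    #   an array of integers                       -> 'std::vector<int>'
+    #   an optional object Foo inside a container  -> 'linked_ptr<Foo>'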
+ cpp_type = None
+ if type_.property_type == PropertyType.REF:
+ ref_type = self._FindType(type_.ref_type)
+ if ref_type is None:
+ raise KeyError('Cannot find referenced type: %s' % type_.ref_type)
+ cpp_type = self.GetCppType(ref_type)
+ elif type_.property_type == PropertyType.BOOLEAN:
+ cpp_type = 'bool'
+ elif type_.property_type == PropertyType.INTEGER:
+ cpp_type = 'int'
+ elif type_.property_type == PropertyType.INT64:
+ cpp_type = 'int64'
+ elif type_.property_type == PropertyType.DOUBLE:
+ cpp_type = 'double'
+ elif type_.property_type == PropertyType.STRING:
+ cpp_type = 'std::string'
+ elif type_.property_type in (PropertyType.ENUM,
+ PropertyType.OBJECT,
+ PropertyType.CHOICES):
+ if self._default_namespace is type_.namespace:
+ cpp_type = cpp_util.Classname(type_.name)
+ else:
+ cpp_namespace = cpp_util.GetCppNamespace(
+ type_.namespace.environment.namespace_pattern,
+ type_.namespace.unix_name)
+ cpp_type = '%s::%s' % (cpp_namespace,
+ cpp_util.Classname(type_.name))
+ elif type_.property_type == PropertyType.ANY:
+ cpp_type = 'base::Value'
+ elif type_.property_type == PropertyType.FUNCTION:
+ # Functions come into the json schema compiler as empty objects. We can
+ # record these as empty DictionaryValues so that we know if the function
+ # was passed in or not.
+ cpp_type = 'base::DictionaryValue'
+ elif type_.property_type == PropertyType.ARRAY:
+ item_cpp_type = self.GetCppType(type_.item_type, is_in_container=True)
+ cpp_type = 'std::vector<%s>' % cpp_util.PadForGenerics(item_cpp_type)
+ elif type_.property_type == PropertyType.BINARY:
+ cpp_type = 'std::string'
+ else:
+ raise NotImplementedError('Cannot get type of %s' % type_.property_type)
+
+ # HACK: optional ENUM is represented elsewhere with a _NONE value, so it
+ # never needs to be wrapped in pointer shenanigans.
+ # TODO(kalman): change this - but it's an exceedingly far-reaching change.
+ if not self.FollowRef(type_).property_type == PropertyType.ENUM:
+ if is_in_container and (is_ptr or not self.IsCopyable(type_)):
+ cpp_type = 'linked_ptr<%s>' % cpp_util.PadForGenerics(cpp_type)
+ elif is_ptr:
+ cpp_type = 'scoped_ptr<%s>' % cpp_util.PadForGenerics(cpp_type)
+
+ return cpp_type
+
+ def IsCopyable(self, type_):
+ return not (self.FollowRef(type_).property_type in (PropertyType.ANY,
+ PropertyType.ARRAY,
+ PropertyType.OBJECT,
+ PropertyType.CHOICES))
+
+ def GenerateForwardDeclarations(self):
+ """Returns the forward declarations for self._default_namespace.
+ """
+ c = Code()
+ for namespace, deps in self._NamespaceTypeDependencies().iteritems():
+ filtered_deps = [
+ dep for dep in deps
+ # Add more ways to forward declare things as necessary.
+ if (not dep.hard and
+ dep.type_.property_type in (PropertyType.CHOICES,
+ PropertyType.OBJECT))]
+ if not filtered_deps:
+ continue
+
+ cpp_namespace = cpp_util.GetCppNamespace(
+ namespace.environment.namespace_pattern,
+ namespace.unix_name)
+ c.Concat(cpp_util.OpenNamespace(cpp_namespace))
+ for dep in filtered_deps:
+ c.Append('struct %s;' % dep.type_.name)
+ c.Concat(cpp_util.CloseNamespace(cpp_namespace))
+ return c
+
+ def GenerateIncludes(self, include_soft=False):
+ """Returns the #include lines for self._default_namespace.
+ """
+ c = Code()
+ for namespace, dependencies in self._NamespaceTypeDependencies().items():
+ for dependency in dependencies:
+ if dependency.hard or include_soft:
+ c.Append('#include "%s/%s.h"' % (namespace.source_file_dir,
+ namespace.unix_name))
+ return c
+
+ def _FindType(self, full_name):
+    """Finds the model.Type with name |full_name|. If it's not from
+    |self._default_namespace| then it needs to be qualified.
+ """
+ namespace = self._schema_loader.ResolveType(full_name,
+ self._default_namespace)
+ if namespace is None:
+ raise KeyError('Cannot resolve type %s. Maybe it needs a prefix '
+ 'if it comes from another namespace?' % full_name)
+ return namespace.types[schema_util.StripNamespace(full_name)]
+
+ def FollowRef(self, type_):
+ """Follows $ref link of types to resolve the concrete type a ref refers to.
+
+ If the property passed in is not of type PropertyType.REF, it will be
+ returned unchanged.
+ """
+ if type_.property_type != PropertyType.REF:
+ return type_
+ return self.FollowRef(self._FindType(type_.ref_type))
+
+ def _NamespaceTypeDependencies(self):
+    """Returns an OrderedDict mapping each dependent model.Namespace to the
+    list of _TypeDependencies that |self._default_namespace| has on it, with
+    entries sorted by namespace and type name.
+ """
+ dependencies = set()
+ for function in self._default_namespace.functions.values():
+ for param in function.params:
+ dependencies |= self._TypeDependencies(param.type_,
+ hard=not param.optional)
+ if function.callback:
+ for param in function.callback.params:
+ dependencies |= self._TypeDependencies(param.type_,
+ hard=not param.optional)
+ for type_ in self._default_namespace.types.values():
+ for prop in type_.properties.values():
+ dependencies |= self._TypeDependencies(prop.type_,
+ hard=not prop.optional)
+ for event in self._default_namespace.events.values():
+ for param in event.params:
+ dependencies |= self._TypeDependencies(param.type_,
+ hard=not param.optional)
+
+ # Make sure that the dependencies are returned in alphabetical order.
+ dependency_namespaces = OrderedDict()
+ for dependency in sorted(dependencies, key=_TypeDependency.GetSortKey):
+ namespace = dependency.type_.namespace
+ if namespace is self._default_namespace:
+ continue
+ if namespace not in dependency_namespaces:
+ dependency_namespaces[namespace] = []
+ dependency_namespaces[namespace].append(dependency)
+
+ return dependency_namespaces
+
+ def _TypeDependencies(self, type_, hard=False):
+    """Gets all the type dependencies of a type.
+ """
+ deps = set()
+ if type_.property_type == PropertyType.REF:
+ deps.add(_TypeDependency(self._FindType(type_.ref_type), hard=hard))
+ elif type_.property_type == PropertyType.ARRAY:
+ # Non-copyable types are not hard because they are wrapped in linked_ptrs
+ # when generated. Otherwise they're typedefs, so they're hard (though we
+ # could generate those typedefs in every dependent namespace, but that
+ # seems weird).
+ deps = self._TypeDependencies(type_.item_type,
+ hard=self.IsCopyable(type_.item_type))
+ elif type_.property_type == PropertyType.CHOICES:
+ for type_ in type_.choices:
+ deps |= self._TypeDependencies(type_, hard=self.IsCopyable(type_))
+ elif type_.property_type == PropertyType.OBJECT:
+ for p in type_.properties.values():
+ deps |= self._TypeDependencies(p.type_, hard=not p.optional)
+ return deps
+
+ def GeneratePropertyValues(self, property, line, nodoc=False):
+ """Generates the Code to display all value-containing properties.
+ """
+ c = Code()
+ if not nodoc:
+ c.Comment(property.description)
+
+ if property.value is not None:
+ c.Append(line % {
+ "type": self.GetCppType(property.type_),
+ "name": property.name,
+ "value": property.value
+ })
+ else:
+ has_child_code = False
+ c.Sblock('namespace %s {' % property.name)
+ for child_property in property.type_.properties.values():
+ child_code = self.GeneratePropertyValues(child_property,
+ line,
+ nodoc=nodoc)
+ if child_code:
+ has_child_code = True
+ c.Concat(child_code)
+ c.Eblock('} // namespace %s' % property.name)
+ if not has_child_code:
+ c = None
+ return c
diff --git a/tools/json_schema_compiler/cpp_type_generator_test.py b/tools/json_schema_compiler/cpp_type_generator_test.py
new file mode 100755
index 0000000..51fcfe9
--- /dev/null
+++ b/tools/json_schema_compiler/cpp_type_generator_test.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from cpp_namespace_environment import CppNamespaceEnvironment
+from cpp_type_generator import CppTypeGenerator
+from json_schema import CachedLoad
+import model
+import unittest
+
+from collections import defaultdict
+
+class _FakeSchemaLoader(object):
+ def __init__(self, model):
+ self._model = model
+
+ def ResolveType(self, type_name, default):
+ parts = type_name.rsplit('.', 1)
+ if len(parts) == 1:
+ return default if type_name in default.types else None
+ return self._model.namespaces[parts[0]]
+
+class CppTypeGeneratorTest(unittest.TestCase):
+ def setUp(self):
+ self.models = defaultdict(model.Model)
+
+ self.forbidden_json = CachedLoad('test/forbidden.json')
+ self.forbidden = self.models['forbidden'].AddNamespace(
+ self.forbidden_json[0], 'path/to/forbidden.json')
+ self.permissions_json = CachedLoad('test/permissions.json')
+ self.permissions = self.models['permissions'].AddNamespace(
+ self.permissions_json[0], 'path/to/permissions.json')
+ self.windows_json = CachedLoad('test/windows.json')
+ self.windows = self.models['windows'].AddNamespace(self.windows_json[0],
+ 'path/to/window.json')
+ self.tabs_json = CachedLoad('test/tabs.json')
+ self.tabs = self.models['tabs'].AddNamespace(self.tabs_json[0],
+ 'path/to/tabs.json')
+ self.browser_action_json = CachedLoad('test/browser_action.json')
+ self.browser_action = self.models['browser_action'].AddNamespace(
+ self.browser_action_json[0], 'path/to/browser_action.json')
+ self.font_settings_json = CachedLoad('test/font_settings.json')
+ self.font_settings = self.models['font_settings'].AddNamespace(
+ self.font_settings_json[0], 'path/to/font_settings.json')
+ self.dependency_tester_json = CachedLoad('test/dependency_tester.json')
+ self.dependency_tester = self.models['dependency_tester'].AddNamespace(
+ self.dependency_tester_json[0], 'path/to/dependency_tester.json')
+ self.content_settings_json = CachedLoad('test/content_settings.json')
+ self.content_settings = self.models['content_settings'].AddNamespace(
+ self.content_settings_json[0], 'path/to/content_settings.json')
+
+ def testGenerateIncludesAndForwardDeclarations(self):
+ m = model.Model()
+ m.AddNamespace(self.windows_json[0],
+ 'path/to/windows.json',
+ environment=CppNamespaceEnvironment('%(namespace)s'))
+ m.AddNamespace(self.tabs_json[0],
+ 'path/to/tabs.json',
+ environment=CppNamespaceEnvironment('%(namespace)s'))
+ manager = CppTypeGenerator(m, _FakeSchemaLoader(m))
+
+ self.assertEquals('', manager.GenerateIncludes().Render())
+ self.assertEquals('#include "path/to/tabs.h"',
+ manager.GenerateIncludes(include_soft=True).Render())
+ self.assertEquals(
+ 'namespace tabs {\n'
+ 'struct Tab;\n'
+ '} // namespace tabs',
+ manager.GenerateForwardDeclarations().Render())
+
+ m = model.Model()
+ m.AddNamespace(self.windows_json[0],
+ 'path/to/windows.json',
+ environment=CppNamespaceEnvironment(
+ 'foo::bar::%(namespace)s'))
+ m.AddNamespace(self.tabs_json[0],
+ 'path/to/tabs.json',
+ environment=CppNamespaceEnvironment(
+ 'foo::bar::%(namespace)s'))
+ manager = CppTypeGenerator(m, _FakeSchemaLoader(m))
+ self.assertEquals(
+ 'namespace foo {\n'
+ 'namespace bar {\n'
+ 'namespace tabs {\n'
+ 'struct Tab;\n'
+ '} // namespace tabs\n'
+ '} // namespace bar\n'
+ '} // namespace foo',
+ manager.GenerateForwardDeclarations().Render())
+ manager = CppTypeGenerator(self.models.get('permissions'),
+ _FakeSchemaLoader(m))
+ self.assertEquals('', manager.GenerateIncludes().Render())
+ self.assertEquals('', manager.GenerateIncludes().Render())
+ self.assertEquals('', manager.GenerateForwardDeclarations().Render())
+ manager = CppTypeGenerator(self.models.get('content_settings'),
+ _FakeSchemaLoader(m))
+ self.assertEquals('', manager.GenerateIncludes().Render())
+
+ def testGenerateIncludesAndForwardDeclarationsDependencies(self):
+ m = model.Model()
+ # Insert 'font_settings' before 'browser_action' in order to test that
+ # CppTypeGenerator sorts them properly.
+ m.AddNamespace(self.font_settings_json[0], 'path/to/font_settings.json')
+ m.AddNamespace(self.browser_action_json[0], 'path/to/browser_action.json')
+ dependency_tester = m.AddNamespace(self.dependency_tester_json[0],
+ 'path/to/dependency_tester.json')
+ manager = CppTypeGenerator(m,
+ _FakeSchemaLoader(m),
+ default_namespace=dependency_tester)
+ self.assertEquals('#include "path/to/browser_action.h"\n'
+ '#include "path/to/font_settings.h"',
+ manager.GenerateIncludes().Render())
+ self.assertEquals('', manager.GenerateForwardDeclarations().Render())
+
+ def testGetCppTypeSimple(self):
+ manager = CppTypeGenerator(self.models.get('tabs'), _FakeSchemaLoader(None))
+ self.assertEquals(
+ 'int',
+ manager.GetCppType(self.tabs.types['Tab'].properties['id'].type_))
+ self.assertEquals(
+ 'std::string',
+ manager.GetCppType(self.tabs.types['Tab'].properties['status'].type_))
+ self.assertEquals(
+ 'bool',
+ manager.GetCppType(self.tabs.types['Tab'].properties['selected'].type_))
+
+ def testStringAsType(self):
+ manager = CppTypeGenerator(self.models.get('font_settings'),
+ _FakeSchemaLoader(None))
+ self.assertEquals(
+ 'std::string',
+ manager.GetCppType(self.font_settings.types['FakeStringType']))
+
+ def testArrayAsType(self):
+ manager = CppTypeGenerator(self.models.get('browser_action'),
+ _FakeSchemaLoader(None))
+ self.assertEquals(
+ 'std::vector<int>',
+ manager.GetCppType(self.browser_action.types['ColorArray']))
+
+ def testGetCppTypeArray(self):
+ manager = CppTypeGenerator(self.models.get('windows'),
+ _FakeSchemaLoader(None))
+ self.assertEquals(
+ 'std::vector<linked_ptr<Window> >',
+ manager.GetCppType(
+ self.windows.functions['getAll'].callback.params[0].type_))
+ manager = CppTypeGenerator(self.models.get('permissions'),
+ _FakeSchemaLoader(None))
+ self.assertEquals(
+ 'std::vector<std::string>',
+ manager.GetCppType(
+ self.permissions.types['Permissions'].properties['origins'].type_))
+
+ def testGetCppTypeLocalRef(self):
+ manager = CppTypeGenerator(self.models.get('tabs'), _FakeSchemaLoader(None))
+ self.assertEquals(
+ 'Tab',
+ manager.GetCppType(self.tabs.functions['get'].callback.params[0].type_))
+
+ def testGetCppTypeIncludedRef(self):
+ m = model.Model()
+ m.AddNamespace(self.windows_json[0],
+ 'path/to/windows.json',
+ environment=CppNamespaceEnvironment('%(namespace)s'))
+ m.AddNamespace(self.tabs_json[0],
+ 'path/to/tabs.json',
+ environment=CppNamespaceEnvironment('%(namespace)s'))
+ manager = CppTypeGenerator(m, _FakeSchemaLoader(m))
+ self.assertEquals(
+ 'std::vector<linked_ptr<tabs::Tab> >',
+ manager.GetCppType(
+ self.windows.types['Window'].properties['tabs'].type_))
+
+ def testGetCppTypeWithPadForGeneric(self):
+ manager = CppTypeGenerator(self.models.get('permissions'),
+ _FakeSchemaLoader(None))
+ self.assertEquals('std::vector<std::string>',
+ manager.GetCppType(
+ self.permissions.types['Permissions'].properties['origins'].type_,
+ is_in_container=False))
+ self.assertEquals('linked_ptr<std::vector<std::string> >',
+ manager.GetCppType(
+ self.permissions.types['Permissions'].properties['origins'].type_,
+ is_in_container=True))
+ self.assertEquals('bool',
+ manager.GetCppType(
+ self.permissions.functions['contains'].callback.params[0].type_,
+ is_in_container=True))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/json_schema_compiler/cpp_util.py b/tools/json_schema_compiler/cpp_util.py
new file mode 100644
index 0000000..187d99f
--- /dev/null
+++ b/tools/json_schema_compiler/cpp_util.py
@@ -0,0 +1,158 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilies and constants specific to Chromium C++ code.
+"""
+
+from code import Code
+from datetime import datetime
+from model import PropertyType
+import os
+import re
+
+CHROMIUM_LICENSE = (
+"""// Copyright (c) %d The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.""" % datetime.now().year
+)
+GENERATED_FILE_MESSAGE = """// GENERATED FROM THE API DEFINITION IN
+// %s
+// DO NOT EDIT.
+"""
+GENERATED_BUNDLE_FILE_MESSAGE = """// GENERATED FROM THE API DEFINITIONS IN
+// %s
+// DO NOT EDIT.
+"""
+GENERATED_FEATURE_MESSAGE = """// GENERATED FROM THE FEATURE DEFINITIONS IN
+// %s
+// DO NOT EDIT.
+"""
+
+def Classname(s):
+ """Translates a namespace name or function name into something more
+ suited to C++.
+
+  e.g. experimental.downloads -> Experimental_Downloads
+ updateAll -> UpdateAll.
+ """
+ return '_'.join([x[0].upper() + x[1:] for x in re.split('\W', s)])
+
+
+def GetAsFundamentalValue(type_, src, dst):
+ """Returns the C++ code for retrieving a fundamental type from a
+ Value into a variable.
+
+ src: Value*
+  dst: pointer to the destination variable
+ """
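+  # For example, for a STRING property this returns something like
+  # 'value->GetAsString(&out)' when called with src='value' and dst='&out'
+  # (names here are purely illustrative).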
+ return {
+ PropertyType.BOOLEAN: '%s->GetAsBoolean(%s)',
+ PropertyType.DOUBLE: '%s->GetAsDouble(%s)',
+ PropertyType.INTEGER: '%s->GetAsInteger(%s)',
+ PropertyType.STRING: '%s->GetAsString(%s)',
+ }[type_.property_type] % (src, dst)
+
+
+def GetValueType(type_):
+ """Returns the Value::Type corresponding to the model.Type.
+ """
+ return {
+ PropertyType.ARRAY: 'base::Value::TYPE_LIST',
+ PropertyType.BINARY: 'base::Value::TYPE_BINARY',
+ PropertyType.BOOLEAN: 'base::Value::TYPE_BOOLEAN',
+ # PropertyType.CHOICES can be any combination of types.
+ PropertyType.DOUBLE: 'base::Value::TYPE_DOUBLE',
+ PropertyType.ENUM: 'base::Value::TYPE_STRING',
+ PropertyType.FUNCTION: 'base::Value::TYPE_DICTIONARY',
+ PropertyType.INTEGER: 'base::Value::TYPE_INTEGER',
+ PropertyType.OBJECT: 'base::Value::TYPE_DICTIONARY',
+ PropertyType.STRING: 'base::Value::TYPE_STRING',
+ }[type_.property_type]
+
+
+def GetParameterDeclaration(param, type_):
+ """Gets a parameter declaration of a given model.Property and its C++
+ type.
+ """
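+  # For example, a STRING property named 'title' is declared as
+  # 'const std::string& title', while an INTEGER property named 'count' is
+  # declared as 'int count'.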
+ if param.type_.property_type in (PropertyType.ANY,
+ PropertyType.ARRAY,
+ PropertyType.CHOICES,
+ PropertyType.OBJECT,
+ PropertyType.REF,
+ PropertyType.STRING):
+ arg = 'const %(type)s& %(name)s'
+ else:
+ arg = '%(type)s %(name)s'
+ return arg % {
+ 'type': type_,
+ 'name': param.unix_name,
+ }
+
+
+def GenerateIfndefName(file_path):
+ """Formats |file_path| as a #define name. Presumably |file_path| is a header
+ file, or there's little point in generating a #define for it.
+
+  e.g. chrome/extensions/gen/file.h becomes CHROME_EXTENSIONS_GEN_FILE_H__.
+ """
+ return (('%s__' % file_path).upper()
+ .replace('\\', '_')
+ .replace('/', '_')
+ .replace('.', '_'))
+
+
+def PadForGenerics(var):
+ """Appends a space to |var| if it ends with a >, so that it can be compiled
+ within generic types.
+ """
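+  # For example, 'std::vector<int>' becomes 'std::vector<int> ' so that nesting
+  # it as 'std::vector<std::vector<int> >' avoids the '>>' token.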
+ return ('%s ' % var) if var.endswith('>') else var
+
+
+def OpenNamespace(cpp_namespace):
+ """Get opening root namespace declarations.
+ """
+ c = Code()
+ for component in cpp_namespace.split('::'):
+ c.Append('namespace %s {' % component)
+ return c
+
+
+def CloseNamespace(cpp_namespace):
+ """Get closing root namespace declarations.
+ """
+ c = Code()
+ for component in reversed(cpp_namespace.split('::')):
+ c.Append('} // namespace %s' % component)
+ return c
+
+
+def ConstantName(feature_name):
+ """Returns a kName for a feature's name.
+ """
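+  # For example, 'experimental.fooBar' -> 'kExperimentalFooBar'.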
+ return ('k' + ''.join(word[0].upper() + word[1:]
+ for word in feature_name.replace('.', ' ').split()))
+
+
+def CamelCase(unix_name):
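+  # For example, CamelCase('font_settings') -> 'FontSettings'.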
+ return ''.join(word.capitalize() for word in unix_name.split('_'))
+
+
+def ClassName(filepath):
+ return CamelCase(os.path.split(filepath)[1])
+
+
+def GetCppNamespace(pattern, namespace):
+ '''Returns the C++ namespace given |pattern| which includes a %(namespace)s
+ substitution, and the |namespace| to substitute. It is expected that |pattern|
+ has been passed as a flag to compiler.py from GYP/GN.
+ '''
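+  # For example, GetCppNamespace('extensions::api::%(namespace)s', 'foo_bar')
+  # returns 'extensions::api::foo_bar'.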
+ # For some reason Windows builds escape the % characters, so unescape them.
+ # This means that %% can never appear legitimately within a pattern, but
+ # that's ok. It should never happen.
+ cpp_namespace = pattern.replace('%%', '%') % { 'namespace': namespace }
+ assert '%' not in cpp_namespace, \
+ ('Did not manage to fully substitute namespace "%s" into pattern "%s"'
+ % (namespace, pattern))
+ return cpp_namespace
diff --git a/tools/json_schema_compiler/cpp_util_test.py b/tools/json_schema_compiler/cpp_util_test.py
new file mode 100755
index 0000000..eef4c55
--- /dev/null
+++ b/tools/json_schema_compiler/cpp_util_test.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from cpp_util import (
+ Classname, CloseNamespace, GenerateIfndefName, OpenNamespace)
+
+class CppUtilTest(unittest.TestCase):
+ def testClassname(self):
+ self.assertEquals('Permissions', Classname('permissions'))
+ self.assertEquals('UpdateAllTheThings',
+ Classname('updateAllTheThings'))
+ self.assertEquals('Aa_Bb_Cc', Classname('aa.bb.cc'))
+
+ def testNamespaceDeclaration(self):
+ self.assertEquals('namespace foo {',
+ OpenNamespace('foo').Render())
+ self.assertEquals('} // namespace foo',
+ CloseNamespace('foo').Render())
+
+ self.assertEquals(
+ 'namespace extensions {\n'
+ 'namespace foo {',
+ OpenNamespace('extensions::foo').Render())
+ self.assertEquals(
+ '} // namespace foo\n'
+ '} // namespace extensions',
+ CloseNamespace('extensions::foo').Render())
+
+ self.assertEquals(
+ 'namespace extensions {\n'
+ 'namespace gen {\n'
+ 'namespace api {',
+ OpenNamespace('extensions::gen::api').Render())
+ self.assertEquals(
+ '} // namespace api\n'
+ '} // namespace gen\n'
+ '} // namespace extensions',
+ CloseNamespace('extensions::gen::api').Render())
+
+ self.assertEquals(
+ 'namespace extensions {\n'
+ 'namespace gen {\n'
+ 'namespace api {\n'
+ 'namespace foo {',
+ OpenNamespace('extensions::gen::api::foo').Render())
+ self.assertEquals(
+ '} // namespace foo\n'
+ '} // namespace api\n'
+ '} // namespace gen\n'
+ '} // namespace extensions',
+ CloseNamespace('extensions::gen::api::foo').Render())
+
+ def testGenerateIfndefName(self):
+ self.assertEquals('FOO_BAR_BAZ_H__', GenerateIfndefName('foo\\bar\\baz.h'))
+ self.assertEquals('FOO_BAR_BAZ_H__', GenerateIfndefName('foo/bar/baz.h'))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/json_schema_compiler/dart_generator.py b/tools/json_schema_compiler/dart_generator.py
new file mode 100644
index 0000000..a9ba0d6
--- /dev/null
+++ b/tools/json_schema_compiler/dart_generator.py
@@ -0,0 +1,763 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Generator language component for compiler.py that adds Dart language support.
+"""
+
+from code import Code
+from model import Function, PropertyType
+from schema_util import StripNamespace
+
+import os
+from datetime import datetime
+
+LICENSE = (
+"""// Copyright (c) %s, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.""" %
+ datetime.now().year)
+
+class DartGenerator(object):
+ def __init__(self, dart_overrides_dir=None):
+ self._dart_overrides_dir = dart_overrides_dir
+
+ def Generate(self, namespace):
+ return _Generator(namespace, self._dart_overrides_dir).Generate()
+
+
+class _Generator(object):
+ """A .dart generator for a namespace.
+ """
+
+ def __init__(self, namespace, dart_overrides_dir=None):
+ self._namespace = namespace
+ # TODO(sashab): Once inline type definitions start being added to
+ # self._types, make a _FindType(self, type_) function that looks at
+ # self._namespace.types.
+ self._types = namespace.types
+
+ # Build a dictionary of Type Name --> Custom Dart code.
+ self._type_overrides = {}
+ if dart_overrides_dir is not None:
+ for filename in os.listdir(dart_overrides_dir):
+ if filename.startswith(namespace.unix_name):
+ with open(os.path.join(dart_overrides_dir, filename)) as f:
+ # Split off the namespace and file extension, leaving just the type.
+ type_path = '.'.join(filename.split('.')[1:-1])
+ self._type_overrides[type_path] = f.read()
+
+ # TODO(sashab): Add all inline type definitions to the global Types
+ # dictionary here, so they have proper names, and are implemented along with
+ # all other types. Also update the parameters/members with these types
+ # to reference these new types instead.
+
+ def Generate(self):
+ """Generates a Code object with the .dart for the entire namespace.
+ """
+ c = Code()
+ (c.Append(LICENSE)
+ .Append()
+ .Append('// Generated from namespace: %s' % self._namespace.name)
+ .Append()
+ .Append('part of chrome;'))
+
+ if self._types:
+ (c.Append()
+ .Append('/**')
+ .Append(' * Types')
+ .Append(' */')
+ .Append()
+ )
+ for type_ in self._types.values():
+ # Check for custom dart for this whole type.
+ override = self._GetOverride([type_.name], document_with=type_)
+ c.Cblock(override if override is not None else self._GenerateType(type_))
+
+ if self._namespace.events:
+ (c.Append('/**')
+ .Append(' * Events')
+ .Append(' */')
+ .Append()
+ )
+ for event_name in self._namespace.events:
+ c.Cblock(self._GenerateEvent(self._namespace.events[event_name]))
+
+ (c.Append('/**')
+ .Append(' * Functions')
+ .Append(' */')
+ .Append()
+ )
+ c.Cblock(self._GenerateMainClass())
+
+ return c
+
+ def _GenerateType(self, type_):
+ """Given a Type object, returns the Code with the .dart for this
+ type's definition.
+
+ Assumes this type is a Parameter Type (creatable by user), and creates an
+ object that extends ChromeObject. All parameters are specifiable as named
+ arguments in the constructor, and all methods are wrapped with getters and
+ setters that hide the JS() implementation.
+ """
+ c = Code()
+
+ # Since enums are just treated as strings for now, don't generate their
+ # type.
+ # TODO(sashab): Find a nice way to wrap enum objects.
+ if type_.property_type is PropertyType.ENUM:
+ return c
+
+ (c.Concat(self._GenerateDocumentation(type_))
+ .Sblock('class %(type_name)s extends ChromeObject {')
+ )
+
+ # Check whether this type has function members. If it does, don't allow
+ # public construction.
+ add_public_constructor = all(not self._IsFunction(p.type_)
+ for p in type_.properties.values())
+ constructor_fields = [self._GeneratePropertySignature(p)
+ for p in type_.properties.values()]
+
+ if add_public_constructor:
+ (c.Append('/*')
+ .Append(' * Public constructor')
+ .Append(' */')
+ .Sblock('%(type_name)s({%(constructor_fields)s}) {')
+ )
+
+ for prop_name in type_.properties:
+ (c.Sblock('if (%s != null)' % prop_name)
+ .Append('this.%s = %s;' % (prop_name, prop_name))
+ .Eblock()
+ )
+ (c.Eblock('}')
+ .Append()
+ )
+
+ (c.Append('/*')
+ .Append(' * Private constructor')
+ .Append(' */')
+ .Append('%(type_name)s._proxy(_jsObject) : super._proxy(_jsObject);')
+ )
+
+ # Add an accessor (getter & setter) for each property.
+ properties = [p for p in type_.properties.values()
+ if not self._IsFunction(p.type_)]
+ if properties:
+ (c.Append()
+ .Append('/*')
+ .Append(' * Public accessors')
+ .Append(' */')
+ )
+ for prop in properties:
+ override = self._GetOverride([type_.name, prop.name], document_with=prop)
+ c.Concat(override if override is not None
+ else self._GenerateGetterAndSetter(type_, prop))
+
+ # Now add all the methods.
+ methods = [t for t in type_.properties.values()
+ if self._IsFunction(t.type_)]
+ if methods:
+ (c.Append()
+ .Append('/*')
+ .Append(' * Methods')
+ .Append(' */')
+ )
+ for prop in methods:
+ # Check if there's an override for this method.
+ override = self._GetOverride([type_.name, prop.name], document_with=prop)
+ c.Cblock(override if override is not None
+ else self._GenerateFunction(prop.type_.function))
+
+ (c.Eblock('}')
+ .Substitute({
+ 'type_name': self._AddPrefix(type_.simple_name),
+ 'constructor_fields': ', '.join(constructor_fields)
+ })
+ )
+
+ return c
+
+ def _GenerateGetterAndSetter(self, type_, prop):
+ """Given a Type and Property, returns the Code object for the getter and
+ setter for that property.
+ """
+ c = Code()
+ override = self._GetOverride([type_.name, prop.name, '.get'],
+ document_with=prop)
+ c.Cblock(override if override is not None
+ else self._GenerateGetter(type_, prop))
+ override = self._GetOverride([type_.name, prop.name, '.set'])
+ c.Cblock(override if override is not None
+ else self._GenerateSetter(type_, prop))
+ return c
+
+ def _GenerateGetter(self, type_, prop):
+ """Given a Type and Property, returns the Code object for the getter for
+ that property.
+
+ Also adds the documentation for this property before the method.
+ """
+ c = Code()
+ c.Concat(self._GenerateDocumentation(prop))
+
+ type_name = self._GetDartType(prop.type_)
+ if (self._IsBaseType(prop.type_)):
+ c.Append("%s get %s => JS('%s', '#.%s', this._jsObject);" %
+ (type_name, prop.name, type_name, prop.name))
+ elif self._IsSerializableObjectType(prop.type_):
+ c.Append("%s get %s => new %s._proxy(JS('', '#.%s', "
+ "this._jsObject));" %
+ (type_name, prop.name, type_name, prop.name))
+ elif self._IsListOfSerializableObjects(prop.type_):
+ (c.Sblock('%s get %s {' % (type_name, prop.name))
+ .Append('%s __proxy_%s = new %s();' % (type_name, prop.name,
+ type_name))
+ .Append("int count = JS('int', '#.%s.length', this._jsObject);" %
+ prop.name)
+ .Sblock("for (int i = 0; i < count; i++) {")
+ .Append("var item = JS('', '#.%s[#]', this._jsObject, i);" % prop.name)
+ .Append('__proxy_%s.add(new %s._proxy(item));' % (prop.name,
+ self._GetDartType(prop.type_.item_type)))
+ .Eblock('}')
+ .Append('return __proxy_%s;' % prop.name)
+ .Eblock('}')
+ )
+ elif self._IsObjectType(prop.type_):
+ # TODO(sashab): Think of a way to serialize generic Dart objects.
+ if type_name in self._types:
+ c.Append("%s get %s => new %s._proxy(JS('%s', '#.%s', "
+ "this._jsObject));" %
+ (type_name, prop.name, type_name, type_name, prop.name))
+ else:
+ c.Append("%s get %s => JS('%s', '#.%s', this._jsObject);" %
+ (type_name, prop.name, type_name, prop.name))
+ else:
+ raise Exception(
+ "Could not generate wrapper for %s.%s: unserializable type %s" %
+ (type_.name, prop.name, type_name)
+ )
+ return c
+
+ def _GenerateSetter(self, type_, prop):
+ """Given a Type and Property, returns the Code object for the setter for
+ that property.
+ """
+ c = Code()
+ type_name = self._GetDartType(prop.type_)
+ wrapped_name = prop.name
+ if not self._IsBaseType(prop.type_):
+ wrapped_name = 'convertArgument(%s)' % prop.name
+
+ (c.Sblock("void set %s(%s %s) {" % (prop.name, type_name, prop.name))
+ .Append("JS('void', '#.%s = #', this._jsObject, %s);" %
+ (prop.name, wrapped_name))
+ .Eblock("}")
+ )
+ return c
+
+ def _GenerateDocumentation(self, prop):
+ """Given an object, generates the documentation for this object (as a
+ code string) and returns the Code object.
+
+ Returns an empty code object if the object has no documentation.
+
+    Emits the description as '///' doc comment lines.
+ """
+ c = Code()
+ if prop.description is not None:
+ for line in prop.description.split('\n'):
+ c.Comment(line, comment_prefix='/// ')
+ return c
+
+ def _GenerateFunction(self, f):
+ """Returns the Code object for the given function.
+ """
+ c = Code()
+ c.Concat(self._GenerateDocumentation(f))
+
+ if not self._NeedsProxiedCallback(f):
+ c.Append("%s => %s;" % (self._GenerateFunctionSignature(f),
+ self._GenerateProxyCall(f)))
+ return c
+
+ (c.Sblock("%s {" % self._GenerateFunctionSignature(f))
+ .Concat(self._GenerateProxiedFunction(f.callback, f.callback.name))
+ .Append('%s;' % self._GenerateProxyCall(f))
+ .Eblock('}')
+ )
+
+ return c
+
+ def _GenerateProxiedFunction(self, f, callback_name):
+ """Given a function (assumed to be a callback), generates the proxied
+ version of this function, which calls |callback_name| if it is defined.
+
+ Returns a Code object.
+ """
+ c = Code()
+ proxied_params = []
+ # A list of Properties, containing List<*> objects that need proxying for
+ # their members (by copying out each member and proxying it).
+ lists_to_proxy = []
+ for p in f.params:
+ if self._IsBaseType(p.type_):
+ proxied_params.append(p.name)
+ elif self._IsSerializableObjectType(p.type_):
+ proxied_params.append('new %s._proxy(%s)' % (
+ self._GetDartType(p.type_), p.name))
+ elif self._IsListOfSerializableObjects(p.type_):
+ proxied_params.append('__proxy_%s' % p.name)
+ lists_to_proxy.append(p)
+ elif self._IsObjectType(p.type_):
+ # TODO(sashab): Find a way to build generic JS objects back in Dart.
+ proxied_params.append('%s' % p.name)
+ elif p.type_.property_type is PropertyType.ARRAY:
+ # TODO(sashab): This might be okay - what if this is a list of
+ # FileEntry elements? In this case, a basic list will proxy the objects
+ # fine.
+ proxied_params.append('%s' % p.name)
+ else:
+ raise Exception(
+ "Cannot automatically create proxy; can't wrap %s, type %s" % (
+ self._GenerateFunctionSignature(f), self._GetDartType(p.type_)))
+
+ (c.Sblock("void __proxy_callback(%s) {" % ', '.join(p.name for p in
+ f.params))
+ .Sblock('if (%s != null) {' % callback_name)
+ )
+
+ # Add the proxied lists.
+ for list_to_proxy in lists_to_proxy:
+ (c.Append("%s __proxy_%s = new %s();" % (
+ self._GetDartType(list_to_proxy.type_),
+ list_to_proxy.name,
+ self._GetDartType(list_to_proxy.type_)))
+ .Sblock("for (var o in %s) {" % list_to_proxy.name)
+ .Append('__proxy_%s.add(new %s._proxy(o));' % (list_to_proxy.name,
+ self._GetDartType(list_to_proxy.type_.item_type)))
+ .Eblock("}")
+ )
+
+ (c.Append("%s(%s);" % (callback_name, ', '.join(proxied_params)))
+ .Eblock('}')
+ .Eblock('}')
+ )
+ return c
+
+ def _NeedsProxiedCallback(self, f):
+ """Given a function, returns True if this function's callback needs to be
+ proxied, False if not.
+
+ Function callbacks need to be proxied if they have at least one
+ non-base-type parameter.
+ """
+ return f.callback and self._NeedsProxy(f.callback)
+
+ def _NeedsProxy(self, f):
+ """Given a function, returns True if it needs to be proxied, False if not.
+
+ A function needs to be proxied if any of its members are non-base types.
+ This means that, when the function object is passed to Javascript, it
+ needs to be wrapped in a "proxied" call that converts the JS inputs to Dart
+ objects explicitly, before calling the real function with these new objects.
+ """
+ return any(not self._IsBaseType(p.type_) for p in f.params)
+
+ def _GenerateProxyCall(self, function, call_target='this._jsObject'):
+ """Given a function, generates the code to call that function via JS().
+ Returns a string.
+
+ |call_target| is the name of the object to call the function on. The default
+ is this._jsObject.
+
+ e.g.
+ JS('void', '#.resizeTo(#, #)', this._jsObject, width, height)
+ JS('void', '#.setBounds(#)', this._jsObject, convertArgument(bounds))
+ """
+ n_params = len(function.params)
+ if function.callback:
+ n_params += 1
+
+ return_type_str = self._GetDartType(function.returns)
+ params = []
+
+ # If this object is serializable, don't convert the type from JS - pass the
+ # JS object straight into the proxy.
+ if self._IsSerializableObjectType(function.returns):
+ params.append("''")
+ else:
+ params.append("'%s'" % return_type_str)
+
+ params.append("'#.%s(%s)'" % (function.name, ', '.join(['#'] * n_params)))
+ params.append(call_target)
+
+ for param in function.params:
+ if not self._IsBaseType(param.type_):
+ params.append('convertArgument(%s)' % param.name)
+ else:
+ params.append(param.name)
+ if function.callback:
+ # If this isn't a base type, we need a proxied callback.
+ callback_name = function.callback.name
+ if self._NeedsProxiedCallback(function):
+ callback_name = "__proxy_callback"
+ params.append('convertDartClosureToJS(%s, %s)' % (callback_name,
+ len(function.callback.params)))
+
+ # If the object is serializable, call the proxy constructor for this type.
+ proxy_call = 'JS(%s)' % ', '.join(params)
+ if self._IsSerializableObjectType(function.returns):
+ proxy_call = 'new %s._proxy(%s)' % (return_type_str, proxy_call)
+
+ return proxy_call
+
+ def _GenerateEvent(self, event):
+ """Given a Function object, returns the Code with the .dart for this event,
+ represented by the function.
+
+ All events extend the Event base type.
+ """
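+    # For example, an event named 'onClosed' in namespace 'app_window' produces
+    # a class named 'Event_app_window_onClosed'.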
+ c = Code()
+
+ # Add documentation for this event.
+ (c.Concat(self._GenerateDocumentation(event))
+ .Sblock('class Event_%(event_name)s extends Event {')
+ )
+
+ # If this event needs a proxy, all calls need to be proxied.
+ needs_proxy = self._NeedsProxy(event)
+
+ # Override Event callback type definitions.
+ for ret_type, event_func in (('void', 'addListener'),
+ ('void', 'removeListener'),
+ ('bool', 'hasListener')):
+ param_list = self._GenerateParameterList(event.params, event.callback,
+ convert_optional=True)
+ if needs_proxy:
+ (c.Sblock('%s %s(void callback(%s)) {' % (ret_type, event_func,
+ param_list))
+ .Concat(self._GenerateProxiedFunction(event, 'callback'))
+ .Append('super.%s(__proxy_callback);' % event_func)
+ .Eblock('}')
+ )
+ else:
+ c.Append('%s %s(void callback(%s)) => super.%s(callback);' %
+ (ret_type, event_func, param_list, event_func))
+ c.Append()
+
+ # Generate the constructor.
+ (c.Append('Event_%(event_name)s(jsObject) : '
+ 'super._(jsObject, %(param_num)d);')
+ .Eblock('}')
+ .Substitute({
+ 'event_name': self._namespace.unix_name + '_' + event.name,
+ 'param_num': len(event.params)
+ })
+ )
+
+ return c
+
+ def _GenerateMainClass(self):
+ """Generates the main class for this file, which links to all functions
+ and events.
+
+ Returns a code object.
+ """
+ c = Code()
+ (c.Sblock('class API_%s {' % self._namespace.unix_name)
+ .Append('/*')
+ .Append(' * API connection')
+ .Append(' */')
+ .Append('Object _jsObject;')
+ )
+
+ # Add events.
+ if self._namespace.events:
+ (c.Append()
+ .Append('/*')
+ .Append(' * Events')
+ .Append(' */')
+ )
+ for event_name in self._namespace.events:
+ c.Append('Event_%s_%s %s;' % (self._namespace.unix_name, event_name,
+ event_name))
+
+ # Add functions.
+ if self._namespace.functions:
+ (c.Append()
+ .Append('/*')
+ .Append(' * Functions')
+ .Append(' */')
+ )
+ for function in self._namespace.functions.values():
+ # Check for custom dart for this whole property.
+ override = self._GetOverride([function.name], document_with=function)
+ c.Cblock(override if override is not None
+ else self._GenerateFunction(function))
+
+ # Add the constructor.
+ c.Sblock('API_%s(this._jsObject) {' % self._namespace.unix_name)
+
+ # Add events to constructor.
+ for event_name in self._namespace.events:
+ c.Append("%s = new Event_%s_%s(JS('', '#.%s', this._jsObject));" %
+ (event_name, self._namespace.unix_name, event_name, event_name))
+
+ (c.Eblock('}')
+ .Eblock('}')
+ )
+ return c
+
+ def _GeneratePropertySignature(self, prop):
+ """Given a property, returns a signature for that property.
+ Recursively generates the signature for callbacks.
+ Returns a String for the given property.
+
+ e.g.
+ bool x
+ void onClosed()
+ void doSomething(bool x, void callback([String x]))
+ """
+ if self._IsFunction(prop.type_):
+ return self._GenerateFunctionSignature(prop.type_.function)
+ return '%(type)s %(name)s' % {
+ 'type': self._GetDartType(prop.type_),
+ 'name': prop.simple_name
+ }
+
+ def _GenerateFunctionSignature(self, function, convert_optional=False):
+ """Given a function object, returns the signature for that function.
+ Recursively generates the signature for callbacks.
+ Returns a String for the given function.
+
+ If convert_optional is True, changes optional parameters to be required.
+
+ e.g.
+ void onClosed()
+ bool isOpen([String type])
+ void doSomething(bool x, void callback([String x]))
+ """
+ sig = '%(return_type)s %(name)s(%(params)s)'
+
+ if function.returns:
+ return_type = self._GetDartType(function.returns)
+ else:
+ return_type = 'void'
+
+ return sig % {
+ 'return_type': return_type,
+ 'name': function.simple_name,
+ 'params': self._GenerateParameterList(function.params,
+ function.callback,
+ convert_optional=convert_optional)
+ }
+
+ def _GenerateParameterList(self,
+ params,
+ callback=None,
+ convert_optional=False):
+ """Given a list of function parameters, generates their signature (as a
+ string).
+
+ e.g.
+ [String type]
+ bool x, void callback([String x])
+
+ If convert_optional is True, changes optional parameters to be required.
+ Useful for callbacks, where optional parameters are treated as required.
+ """
+ # Params lists (required & optional), to be joined with commas.
+ # TODO(sashab): Don't assume optional params always come after required
+ # ones.
+ params_req = []
+ params_opt = []
+ for param in params:
+ p_sig = self._GeneratePropertySignature(param)
+ if param.optional and not convert_optional:
+ params_opt.append(p_sig)
+ else:
+ params_req.append(p_sig)
+
+ # Add the callback, if it exists.
+ if callback:
+ c_sig = self._GenerateFunctionSignature(callback, convert_optional=True)
+ if callback.optional:
+ params_opt.append(c_sig)
+ else:
+ params_req.append(c_sig)
+
+ # Join the parameters with commas.
+ # Optional parameters have to be in square brackets, e.g.:
+ #
+ # required params | optional params | output
+ # [] | [] | ''
+ # [x, y] | [] | 'x, y'
+ # [] | [a, b] | '[a, b]'
+ # [x, y] | [a, b] | 'x, y, [a, b]'
+ if params_opt:
+ params_opt[0] = '[%s' % params_opt[0]
+ params_opt[-1] = '%s]' % params_opt[-1]
+ param_sets = [', '.join(params_req), ', '.join(params_opt)]
+
+ # The 'if p' part here is needed to prevent commas where there are no
+ # parameters of a certain type.
+ # If there are no optional parameters, this prevents a _trailing_ comma,
+ # e.g. '(x, y,)'. Similarly, if there are no required parameters, this
+ # prevents a leading comma, e.g. '(, [a, b])'.
+ return ', '.join(p for p in param_sets if p)
+
+ def _GetOverride(self, key_chain, document_with=None):
+ """Given a list of keys, joins them with periods and searches for them in
+ the custom dart overrides.
+ If there is an override for that key, finds the override code and returns
+ the Code object. If not, returns None.
+
+ If document_with is not None, adds the documentation for this property
+ before the override code.
+ """
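+    # For example, key_chain ['AppWindow', 'focus'] looks up 'AppWindow.focus',
+    # which would have been loaded from an override file with a name such as
+    # 'app_window.AppWindow.focus.dart' (a hypothetical filename).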
+ c = Code()
+ contents = self._type_overrides.get('.'.join(key_chain))
+ if contents is None:
+ return None
+
+ if document_with is not None:
+ c.Concat(self._GenerateDocumentation(document_with))
+ for line in contents.strip('\n').split('\n'):
+ c.Append(line)
+ return c
+
+ def _AddPrefix(self, name):
+ """Given the name of a type, prefixes the namespace (as camelcase) and
+ returns the new name.
+ """
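+    # For example, a type named 'Bounds' in the 'app.window' namespace becomes
+    # 'AppWindowBounds'.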
+ # TODO(sashab): Split the dart library into multiple files, avoiding the
+ # need for this prefixing.
+ return ('%s%s' % (
+ ''.join(s.capitalize() for s in self._namespace.name.split('.')),
+ name))
+
+ def _IsFunction(self, type_):
+ """Given a model.Type, returns whether this type is a function.
+ """
+ return type_.property_type == PropertyType.FUNCTION
+
+ def _IsSerializableObjectType(self, type_):
+ """Given a model.Type, returns whether this type is a serializable object.
+ Serializable objects are custom types defined in this namespace.
+
+ If this object is a reference to something not in this namespace, assumes
+    it's a serializable object.
+ """
+ if type_ is None:
+ return False
+ if type_.property_type is PropertyType.CHOICES:
+ return all(self._IsSerializableObjectType(c) for c in type_.choices)
+ if type_.property_type is PropertyType.REF:
+ if type_.ref_type in self._types:
+ return self._IsObjectType(self._types[type_.ref_type])
+ return True
+ if (type_.property_type == PropertyType.OBJECT
+ and type_.instance_of in self._types):
+ return self._IsObjectType(self._types[type_.instance_of])
+ return False
+
+ def _IsObjectType(self, type_):
+ """Given a model.Type, returns whether this type is an object.
+ """
+ return (self._IsSerializableObjectType(type_)
+ or type_.property_type in [PropertyType.OBJECT, PropertyType.ANY])
+
+ def _IsListOfSerializableObjects(self, type_):
+ """Given a model.Type, returns whether this type is a list of serializable
+ objects (or regular objects, if this list is treated as a type - in this
+ case, the item type was defined inline).
+
+ If this type is a reference to something not in this namespace, assumes
+ it is not a list of serializable objects.
+ """
+ if type_.property_type is PropertyType.CHOICES:
+ return all(self._IsListOfSerializableObjects(c) for c in type_.choices)
+ if type_.property_type is PropertyType.REF:
+ if type_.ref_type in self._types:
+ return self._IsListOfSerializableObjects(self._types[type_.ref_type])
+ return False
+ return (type_.property_type is PropertyType.ARRAY and
+ (self._IsSerializableObjectType(type_.item_type)))
+
+ def _IsListOfBaseTypes(self, type_):
+ """Given a model.Type, returns whether this type is a list of base type
+    objects (strings, numbers, booleans, or lists of these).
+ """
+ if type_.property_type is PropertyType.CHOICES:
+ return all(self._IsListOfBaseTypes(c) for c in type_.choices)
+ return (type_.property_type is PropertyType.ARRAY and
+ self._IsBaseType(type_.item_type))
+
+ def _IsBaseType(self, type_):
+ """Given a model.type_, returns whether this type is a base type
+ (string, number, boolean, or a list of these).
+
+ If type_ is a Choices object, returns True if all possible choices are base
+ types.
+ """
+ # TODO(sashab): Remove 'Choices' as a base type once they are wrapped in
+ # native Dart classes.
+ if type_.property_type is PropertyType.CHOICES:
+ return all(self._IsBaseType(c) for c in type_.choices)
+ return (
+ (self._GetDartType(type_) in ['bool', 'num', 'int', 'double', 'String'])
+ or (type_.property_type is PropertyType.ARRAY
+ and self._IsBaseType(type_.item_type))
+ )
+
+ def _GetDartType(self, type_):
+ """Given a model.Type object, returns its type as a Dart string.
+ """
+ if type_ is None:
+ return 'void'
+
+ prop_type = type_.property_type
+ if prop_type is PropertyType.REF:
+ if type_.ref_type in self._types:
+ return self._GetDartType(self._types[type_.ref_type])
+ # TODO(sashab): If the type is foreign, it might have to be imported.
+ return StripNamespace(type_.ref_type)
+ elif prop_type is PropertyType.BOOLEAN:
+ return 'bool'
+ elif prop_type is PropertyType.INTEGER:
+ return 'int'
+ elif prop_type is PropertyType.INT64:
+ return 'num'
+ elif prop_type is PropertyType.DOUBLE:
+ return 'double'
+ elif prop_type is PropertyType.STRING:
+ return 'String'
+ elif prop_type is PropertyType.ENUM:
+ return 'String'
+ elif prop_type is PropertyType.CHOICES:
+ # TODO(sashab): Think of a nice way to generate code for Choices objects
+ # in Dart.
+ return 'Object'
+ elif prop_type is PropertyType.ANY:
+ return 'Object'
+ elif prop_type is PropertyType.OBJECT:
+ # TODO(sashab): type_.name is the name of the function's parameter for
+ # inline types defined in functions. Think of a way to generate names
+ # for this, or remove all inline type definitions at the start.
+ if type_.instance_of is not None:
+ return type_.instance_of
+ if not isinstance(type_.parent, Function):
+ return self._AddPrefix(type_.name)
+ return 'Object'
+ elif prop_type is PropertyType.FUNCTION:
+ return 'Function'
+ elif prop_type is PropertyType.ARRAY:
+ return 'List<%s>' % self._GetDartType(type_.item_type)
+ elif prop_type is PropertyType.BINARY:
+ return 'String'
+ else:
+ raise NotImplementedError(prop_type)
+
diff --git a/tools/json_schema_compiler/dart_generator_test.py b/tools/json_schema_compiler/dart_generator_test.py
new file mode 100755
index 0000000..2005e7e
--- /dev/null
+++ b/tools/json_schema_compiler/dart_generator_test.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+import unittest
+
+from compiler import GenerateSchema
+
+# If --rebase is passed to this test, this is set to True, indicating the test
+# output should be re-generated for each test (rather than running the tests
+# themselves).
+REBASE_MODE = False
+
+# The directory containing the input and expected output files corresponding
+# to each test name.
+TESTS_DIR = 'dart_test'
+
+class DartTest(unittest.TestCase):
+
+ def _RunTest(self, test_filename):
+ '''Given the name of a test, runs compiler.py on the file:
+ TESTS_DIR/test_filename.idl
+ and compares it to the output in the file:
+ TESTS_DIR/test_filename.dart
+ '''
+ file_rel = os.path.join(TESTS_DIR, test_filename)
+
+ output_dir = None
+ if REBASE_MODE:
+ output_dir = TESTS_DIR
+ output_code = GenerateSchema('dart', ['%s.idl' % file_rel], TESTS_DIR,
+ output_dir, '', None, None, [])
+
+ if not REBASE_MODE:
+ with open('%s.dart' % file_rel) as f:
+ expected_output = f.read()
+ # Remove the first line of the output code (as it contains the filename).
+ # Also remove all blank lines, ignoring them from the comparison.
+ # Compare with lists instead of strings for clearer diffs (especially with
+ # whitespace) when a test fails.
+ self.assertEqual([l for l in expected_output.split('\n') if l],
+ [l for l in output_code.split('\n')[1:] if l])
+
+ def setUp(self):
+ # Increase the maximum diff amount to see the full diff on a failed test.
+ self.maxDiff = 2000
+
+ def testComments(self):
+ self._RunTest('comments')
+
+ def testDictionaries(self):
+ self._RunTest('dictionaries')
+
+ def testEmptyNamespace(self):
+ self._RunTest('empty_namespace')
+
+ def testEmptyType(self):
+ self._RunTest('empty_type')
+
+ def testEvents(self):
+ self._RunTest('events')
+
+ def testBasicFunction(self):
+ self._RunTest('functions')
+
+  def testOperatableType(self):
+ self._RunTest('operatable_type')
+
+ def testTags(self):
+ self._RunTest('tags')
+
+
+if __name__ == '__main__':
+ if '--rebase' in sys.argv:
+ print "Running in rebase mode."
+ REBASE_MODE = True
+ sys.argv.remove('--rebase')
+ unittest.main()
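A minimal sketch of driving one golden comparison by hand, assuming it runs from tools/json_schema_compiler so the dart_test fixtures resolve; the GenerateSchema call and the line filtering mirror DartTest._RunTest above:

    import os
    from compiler import GenerateSchema

    def matches_golden(test_name, tests_dir='dart_test'):
      file_rel = os.path.join(tests_dir, test_name)
      generated = GenerateSchema('dart', ['%s.idl' % file_rel], tests_dir,
                                 None, '', None, None, [])
      with open('%s.dart' % file_rel) as f:
        expected = f.read()
      # Drop the generated header line and all blank lines, as the test does.
      return ([l for l in expected.split('\n') if l] ==
              [l for l in generated.split('\n')[1:] if l])

    print matches_golden('functions')  # True when the fixture is up to date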
diff --git a/tools/json_schema_compiler/dart_test/comments.dart b/tools/json_schema_compiler/dart_test/comments.dart
new file mode 100644
index 0000000..6c30bb7
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/comments.dart
@@ -0,0 +1,31 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Generated from namespace: comments
+
+part of chrome;
+/**
+ * Functions
+ */
+
+class API_comments {
+ /*
+ * API connection
+ */
+ Object _jsObject;
+
+ /*
+ * Functions
+ */
+ /// <p>There's a blank line at the start of this comment.</p><p>Documentation
+ /// for basicFunction. BasicFunction() is a great function. There is a newline
+ /// after this.</p><p>It works like so: +-----+ | | +--+
+ /// | | | | +-----+ --> +--+</p><p>Some other stuff here.
+  /// This paragraph starts with whitespace. Overall, it's a great function.
+ /// There's also a blank line at the end of this comment.</p>
+ void basicFunction() => JS('void', '#.basicFunction()', this._jsObject);
+
+ API_comments(this._jsObject) {
+ }
+}
diff --git a/tools/json_schema_compiler/dart_test/comments.idl b/tools/json_schema_compiler/dart_test/comments.idl
new file mode 100644
index 0000000..7ac1474
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/comments.idl
@@ -0,0 +1,32 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This comment is for the comments namespace.
+namespace comments {
+ // This comments the "Functions" block, and should be ignored.
+ interface Functions {
+ // This comment is separated by at least one blank line from the start of
+ // the function, and should be ignored.
+
+ //
+ // There's a blank line at the start of this comment.
+ //
+ // Documentation for basicFunction.
+ // BasicFunction() is a great function.
+ // There is a newline after this.
+ //
+ // It works like so:
+ // +-----+
+ // | | +--+
+ // | | | |
+ // +-----+ --> +--+
+ //
+ // Some other stuff here.
+ // This paragraph starts with whitespace.
+    // Overall, it's a great function.
+ // There's also a blank line at the end of this comment.
+ //
+ static void basicFunction();
+ };
+};
diff --git a/tools/json_schema_compiler/dart_test/dictionaries.dart b/tools/json_schema_compiler/dart_test/dictionaries.dart
new file mode 100644
index 0000000..d6b6874
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/dictionaries.dart
@@ -0,0 +1,235 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Generated from namespace: dictionaries
+
+part of chrome;
+
+/**
+ * Types
+ */
+
+class DictionariesInnerType extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ DictionariesInnerType({String s, int b, int i, int l, double d, FileEntry f, String os, int ob, int oi, int ol, double od, FileEntry of}) {
+ if (s != null)
+ this.s = s;
+ if (b != null)
+ this.b = b;
+ if (i != null)
+ this.i = i;
+ if (l != null)
+ this.l = l;
+ if (d != null)
+ this.d = d;
+ if (f != null)
+ this.f = f;
+ if (os != null)
+ this.os = os;
+ if (ob != null)
+ this.ob = ob;
+ if (oi != null)
+ this.oi = oi;
+ if (ol != null)
+ this.ol = ol;
+ if (od != null)
+ this.od = od;
+ if (of != null)
+ this.of = of;
+ }
+
+ /*
+ * Private constructor
+ */
+ DictionariesInnerType._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ /// Documentation for the String s.
+ String get s => JS('String', '#.s', this._jsObject);
+
+ void set s(String s) {
+ JS('void', '#.s = #', this._jsObject, s);
+ }
+
+ /// Documentation for the boolean b.
+ int get b => JS('int', '#.b', this._jsObject);
+
+ void set b(int b) {
+ JS('void', '#.b = #', this._jsObject, b);
+ }
+
+ /// Documentation for the int i.
+ int get i => JS('int', '#.i', this._jsObject);
+
+ void set i(int i) {
+ JS('void', '#.i = #', this._jsObject, i);
+ }
+
+ /// Documentation for the long l.
+ int get l => JS('int', '#.l', this._jsObject);
+
+ void set l(int l) {
+ JS('void', '#.l = #', this._jsObject, l);
+ }
+
+ /// Documentation for the double d.
+ double get d => JS('double', '#.d', this._jsObject);
+
+ void set d(double d) {
+ JS('void', '#.d = #', this._jsObject, d);
+ }
+
+ /// Documentation for the file entry f.
+ FileEntry get f => JS('FileEntry', '#.f', this._jsObject);
+
+ void set f(FileEntry f) {
+ JS('void', '#.f = #', this._jsObject, convertArgument(f));
+ }
+
+ /// Documentation for the optional String s.
+ String get os => JS('String', '#.os', this._jsObject);
+
+ void set os(String os) {
+ JS('void', '#.os = #', this._jsObject, os);
+ }
+
+ /// Documentation for the optional boolean ob.
+ int get ob => JS('int', '#.ob', this._jsObject);
+
+ void set ob(int ob) {
+ JS('void', '#.ob = #', this._jsObject, ob);
+ }
+
+ /// Documentation for the optional int i.
+ int get oi => JS('int', '#.oi', this._jsObject);
+
+ void set oi(int oi) {
+ JS('void', '#.oi = #', this._jsObject, oi);
+ }
+
+ /// Documentation for the optional long l.
+ int get ol => JS('int', '#.ol', this._jsObject);
+
+ void set ol(int ol) {
+ JS('void', '#.ol = #', this._jsObject, ol);
+ }
+
+ /// Documentation for the optional double d.
+ double get od => JS('double', '#.od', this._jsObject);
+
+ void set od(double od) {
+ JS('void', '#.od = #', this._jsObject, od);
+ }
+
+ /// Documentation for the optional file entry f.
+ FileEntry get of => JS('FileEntry', '#.of', this._jsObject);
+
+ void set of(FileEntry of) {
+ JS('void', '#.of = #', this._jsObject, convertArgument(of));
+ }
+
+}
+
+class DictionariesOuterType extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ DictionariesOuterType({List<DictionariesInnerType> items, List<DictionariesInnerType> oitems}) {
+ if (items != null)
+ this.items = items;
+ if (oitems != null)
+ this.oitems = oitems;
+ }
+
+ /*
+ * Private constructor
+ */
+ DictionariesOuterType._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ /// Documentation for the array of InnerTypes items.
+ List<DictionariesInnerType> get items {
+ List<DictionariesInnerType> __proxy_items = new List<DictionariesInnerType>();
+ int count = JS('int', '#.items.length', this._jsObject);
+ for (int i = 0; i < count; i++) {
+ var item = JS('', '#.items[#]', this._jsObject, i);
+ __proxy_items.add(new DictionariesInnerType._proxy(item));
+ }
+ return __proxy_items;
+ }
+
+ void set items(List<DictionariesInnerType> items) {
+ JS('void', '#.items = #', this._jsObject, convertArgument(items));
+ }
+
+ /// Documentation for the optional array of Inner Types oitems.
+ List<DictionariesInnerType> get oitems {
+ List<DictionariesInnerType> __proxy_oitems = new List<DictionariesInnerType>();
+ int count = JS('int', '#.oitems.length', this._jsObject);
+ for (int i = 0; i < count; i++) {
+ var item = JS('', '#.oitems[#]', this._jsObject, i);
+ __proxy_oitems.add(new DictionariesInnerType._proxy(item));
+ }
+ return __proxy_oitems;
+ }
+
+ void set oitems(List<DictionariesInnerType> oitems) {
+ JS('void', '#.oitems = #', this._jsObject, convertArgument(oitems));
+ }
+
+}
+
+class DictionariesComplexType extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ DictionariesComplexType({int i, DictionariesComplexType c}) {
+ if (i != null)
+ this.i = i;
+ if (c != null)
+ this.c = c;
+ }
+
+ /*
+ * Private constructor
+ */
+ DictionariesComplexType._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ /// Documentation for the int i.
+ int get i => JS('int', '#.i', this._jsObject);
+
+ void set i(int i) {
+ JS('void', '#.i = #', this._jsObject, i);
+ }
+
+ /// Documentation for the ComplexType c.
+ DictionariesComplexType get c => new DictionariesComplexType._proxy(JS('', '#.c', this._jsObject));
+
+ void set c(DictionariesComplexType c) {
+ JS('void', '#.c = #', this._jsObject, convertArgument(c));
+ }
+
+}
+
+/**
+ * Functions
+ */
+
+class API_dictionaries {
+ /*
+ * API connection
+ */
+ Object _jsObject;
+ API_dictionaries(this._jsObject) {
+ }
+}
diff --git a/tools/json_schema_compiler/dart_test/dictionaries.idl b/tools/json_schema_compiler/dart_test/dictionaries.idl
new file mode 100644
index 0000000..1eb9e25
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/dictionaries.idl
@@ -0,0 +1,61 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This comment is for the dictionaries namespace.
+namespace dictionaries {
+  // Documentation for InnerType.
+ dictionary InnerType {
+ // Documentation for the String s.
+ DOMString s;
+
+ // Documentation for the boolean b.
+ int b;
+
+ // Documentation for the int i.
+ int i;
+
+ // Documentation for the long l.
+ long l;
+
+ // Documentation for the double d.
+ double d;
+
+ // Documentation for the file entry f.
+ [instanceOf=FileEntry] object f;
+
+ // Documentation for the optional String s.
+ DOMString? os;
+
+ // Documentation for the optional boolean ob.
+ int ob;
+
+ // Documentation for the optional int i.
+ int? oi;
+
+ // Documentation for the optional long l.
+ long? ol;
+
+ // Documentation for the optional double d.
+ double? od;
+
+ // Documentation for the optional file entry f.
+ [instanceOf=FileEntry] object? of;
+ };
+
+ dictionary OuterType {
+ // Documentation for the array of InnerTypes items.
+ InnerType[] items;
+
+ // Documentation for the optional array of Inner Types oitems.
+ InnerType[]? oitems;
+ };
+
+ dictionary ComplexType {
+ // Documentation for the int i.
+ int i;
+
+ // Documentation for the ComplexType c.
+ ComplexType c;
+ };
+};
diff --git a/tools/json_schema_compiler/dart_test/empty_namespace.dart b/tools/json_schema_compiler/dart_test/empty_namespace.dart
new file mode 100644
index 0000000..f6e48be
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/empty_namespace.dart
@@ -0,0 +1,19 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Generated from namespace: empty_namespace
+
+part of chrome;
+/**
+ * Functions
+ */
+
+class API_empty_namespace {
+ /*
+ * API connection
+ */
+ Object _jsObject;
+ API_empty_namespace(this._jsObject) {
+ }
+}
diff --git a/tools/json_schema_compiler/dart_test/empty_namespace.idl b/tools/json_schema_compiler/dart_test/empty_namespace.idl
new file mode 100644
index 0000000..824de2d
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/empty_namespace.idl
@@ -0,0 +1,7 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// An empty comment is required for an empty namespace.
+namespace empty_namespace {
+};
diff --git a/tools/json_schema_compiler/dart_test/empty_type.dart b/tools/json_schema_compiler/dart_test/empty_type.dart
new file mode 100644
index 0000000..29e8df9
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/empty_type.dart
@@ -0,0 +1,37 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Generated from namespace: empty_type
+
+part of chrome;
+
+/**
+ * Types
+ */
+
+class Empty_typeEmptyType extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ Empty_typeEmptyType({}) {
+ }
+
+ /*
+ * Private constructor
+ */
+ Empty_typeEmptyType._proxy(_jsObject) : super._proxy(_jsObject);
+}
+
+/**
+ * Functions
+ */
+
+class API_empty_type {
+ /*
+ * API connection
+ */
+ Object _jsObject;
+ API_empty_type(this._jsObject) {
+ }
+}
diff --git a/tools/json_schema_compiler/dart_test/empty_type.idl b/tools/json_schema_compiler/dart_test/empty_type.idl
new file mode 100644
index 0000000..9d7de6f
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/empty_type.idl
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Namespace-level comment for EmptyType.
+namespace empty_type {
+ // Documentation for EmptyType.
+ dictionary EmptyType {
+ };
+};
diff --git a/tools/json_schema_compiler/dart_test/events.dart b/tools/json_schema_compiler/dart_test/events.dart
new file mode 100644
index 0000000..14cb19b
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/events.dart
@@ -0,0 +1,282 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Generated from namespace: events
+
+part of chrome;
+
+/**
+ * Types
+ */
+
+class EventsEventArgumentElement extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ EventsEventArgumentElement({String elementStringArg}) {
+ if (elementStringArg != null)
+ this.elementStringArg = elementStringArg;
+ }
+
+ /*
+ * Private constructor
+ */
+ EventsEventArgumentElement._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ String get elementStringArg => JS('String', '#.elementStringArg', this._jsObject);
+
+ void set elementStringArg(String elementStringArg) {
+ JS('void', '#.elementStringArg = #', this._jsObject, elementStringArg);
+ }
+
+}
+
+class EventsEventArgument extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ EventsEventArgument({FileEntry entryArg, String stringArg, int intArg, List<EventsEventArgumentElement> elements, FileEntry optionalEntryArg, String optionalStringArg, int optionalIntArg, List<EventsEventArgumentElement> optionalElements}) {
+ if (entryArg != null)
+ this.entryArg = entryArg;
+ if (stringArg != null)
+ this.stringArg = stringArg;
+ if (intArg != null)
+ this.intArg = intArg;
+ if (elements != null)
+ this.elements = elements;
+ if (optionalEntryArg != null)
+ this.optionalEntryArg = optionalEntryArg;
+ if (optionalStringArg != null)
+ this.optionalStringArg = optionalStringArg;
+ if (optionalIntArg != null)
+ this.optionalIntArg = optionalIntArg;
+ if (optionalElements != null)
+ this.optionalElements = optionalElements;
+ }
+
+ /*
+ * Private constructor
+ */
+ EventsEventArgument._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ /// A file entry
+ FileEntry get entryArg => JS('FileEntry', '#.entryArg', this._jsObject);
+
+ void set entryArg(FileEntry entryArg) {
+ JS('void', '#.entryArg = #', this._jsObject, convertArgument(entryArg));
+ }
+
+ /// A string
+ String get stringArg => JS('String', '#.stringArg', this._jsObject);
+
+ void set stringArg(String stringArg) {
+ JS('void', '#.stringArg = #', this._jsObject, stringArg);
+ }
+
+ /// A primitive
+ int get intArg => JS('int', '#.intArg', this._jsObject);
+
+ void set intArg(int intArg) {
+ JS('void', '#.intArg = #', this._jsObject, intArg);
+ }
+
+ /// An array
+ List<EventsEventArgumentElement> get elements {
+ List<EventsEventArgumentElement> __proxy_elements = new List<EventsEventArgumentElement>();
+ int count = JS('int', '#.elements.length', this._jsObject);
+ for (int i = 0; i < count; i++) {
+ var item = JS('', '#.elements[#]', this._jsObject, i);
+ __proxy_elements.add(new EventsEventArgumentElement._proxy(item));
+ }
+ return __proxy_elements;
+ }
+
+ void set elements(List<EventsEventArgumentElement> elements) {
+ JS('void', '#.elements = #', this._jsObject, convertArgument(elements));
+ }
+
+ /// Optional file entry
+ FileEntry get optionalEntryArg => JS('FileEntry', '#.optionalEntryArg', this._jsObject);
+
+ void set optionalEntryArg(FileEntry optionalEntryArg) {
+ JS('void', '#.optionalEntryArg = #', this._jsObject, convertArgument(optionalEntryArg));
+ }
+
+ /// A string
+ String get optionalStringArg => JS('String', '#.optionalStringArg', this._jsObject);
+
+ void set optionalStringArg(String optionalStringArg) {
+ JS('void', '#.optionalStringArg = #', this._jsObject, optionalStringArg);
+ }
+
+ /// A primitive
+ int get optionalIntArg => JS('int', '#.optionalIntArg', this._jsObject);
+
+ void set optionalIntArg(int optionalIntArg) {
+ JS('void', '#.optionalIntArg = #', this._jsObject, optionalIntArg);
+ }
+
+ /// An array
+ List<EventsEventArgumentElement> get optionalElements {
+ List<EventsEventArgumentElement> __proxy_optionalElements = new List<EventsEventArgumentElement>();
+ int count = JS('int', '#.optionalElements.length', this._jsObject);
+ for (int i = 0; i < count; i++) {
+ var item = JS('', '#.optionalElements[#]', this._jsObject, i);
+ __proxy_optionalElements.add(new EventsEventArgumentElement._proxy(item));
+ }
+ return __proxy_optionalElements;
+ }
+
+ void set optionalElements(List<EventsEventArgumentElement> optionalElements) {
+ JS('void', '#.optionalElements = #', this._jsObject, convertArgument(optionalElements));
+ }
+
+}
+
+/**
+ * Events
+ */
+
+/// Documentation for the first basic event.
+class Event_events_firstBasicEvent extends Event {
+ void addListener(void callback()) => super.addListener(callback);
+
+ void removeListener(void callback()) => super.removeListener(callback);
+
+ bool hasListener(void callback()) => super.hasListener(callback);
+
+ Event_events_firstBasicEvent(jsObject) : super._(jsObject, 0);
+}
+
+/// Documentation for the second basic event.
+class Event_events_secondBasicEvent extends Event {
+ void addListener(void callback()) => super.addListener(callback);
+
+ void removeListener(void callback()) => super.removeListener(callback);
+
+ bool hasListener(void callback()) => super.hasListener(callback);
+
+ Event_events_secondBasicEvent(jsObject) : super._(jsObject, 0);
+}
+
+/// Documentation for an event with a non-optional primitive argument.
+class Event_events_nonOptionalPrimitiveArgEvent extends Event {
+ void addListener(void callback(int argument)) => super.addListener(callback);
+
+ void removeListener(void callback(int argument)) => super.removeListener(callback);
+
+ bool hasListener(void callback(int argument)) => super.hasListener(callback);
+
+ Event_events_nonOptionalPrimitiveArgEvent(jsObject) : super._(jsObject, 1);
+}
+
+/// Documentation for an event with an optional primitive argument.
+class Event_events_optionalPrimitiveArgEvent extends Event {
+ void addListener(void callback(int argument)) => super.addListener(callback);
+
+ void removeListener(void callback(int argument)) => super.removeListener(callback);
+
+ bool hasListener(void callback(int argument)) => super.hasListener(callback);
+
+ Event_events_optionalPrimitiveArgEvent(jsObject) : super._(jsObject, 1);
+}
+
+/// Documentation for an event with a non-optional dictionary argument.
+class Event_events_nonOptionalDictArgEvent extends Event {
+ void addListener(void callback(EventsEventArgument argument)) {
+ void __proxy_callback(argument) {
+ if (callback != null) {
+ callback(new EventsEventArgument._proxy(argument));
+ }
+ }
+ super.addListener(__proxy_callback);
+ }
+
+ void removeListener(void callback(EventsEventArgument argument)) {
+ void __proxy_callback(argument) {
+ if (callback != null) {
+ callback(new EventsEventArgument._proxy(argument));
+ }
+ }
+ super.removeListener(__proxy_callback);
+ }
+
+ bool hasListener(void callback(EventsEventArgument argument)) {
+ void __proxy_callback(argument) {
+ if (callback != null) {
+ callback(new EventsEventArgument._proxy(argument));
+ }
+ }
+ super.hasListener(__proxy_callback);
+ }
+
+ Event_events_nonOptionalDictArgEvent(jsObject) : super._(jsObject, 1);
+}
+
+/// Documentation for an event with an optional dictionary argument.
+class Event_events_optionalDictArgEvent extends Event {
+ void addListener(void callback(EventsEventArgument argument)) {
+ void __proxy_callback(argument) {
+ if (callback != null) {
+ callback(new EventsEventArgument._proxy(argument));
+ }
+ }
+ super.addListener(__proxy_callback);
+ }
+
+ void removeListener(void callback(EventsEventArgument argument)) {
+ void __proxy_callback(argument) {
+ if (callback != null) {
+ callback(new EventsEventArgument._proxy(argument));
+ }
+ }
+ super.removeListener(__proxy_callback);
+ }
+
+ bool hasListener(void callback(EventsEventArgument argument)) {
+ void __proxy_callback(argument) {
+ if (callback != null) {
+ callback(new EventsEventArgument._proxy(argument));
+ }
+ }
+ super.hasListener(__proxy_callback);
+ }
+
+ Event_events_optionalDictArgEvent(jsObject) : super._(jsObject, 1);
+}
+
+/**
+ * Functions
+ */
+
+class API_events {
+ /*
+ * API connection
+ */
+ Object _jsObject;
+
+ /*
+ * Events
+ */
+ Event_events_firstBasicEvent firstBasicEvent;
+ Event_events_secondBasicEvent secondBasicEvent;
+ Event_events_nonOptionalPrimitiveArgEvent nonOptionalPrimitiveArgEvent;
+ Event_events_optionalPrimitiveArgEvent optionalPrimitiveArgEvent;
+ Event_events_nonOptionalDictArgEvent nonOptionalDictArgEvent;
+ Event_events_optionalDictArgEvent optionalDictArgEvent;
+ API_events(this._jsObject) {
+ firstBasicEvent = new Event_events_firstBasicEvent(JS('', '#.firstBasicEvent', this._jsObject));
+ secondBasicEvent = new Event_events_secondBasicEvent(JS('', '#.secondBasicEvent', this._jsObject));
+ nonOptionalPrimitiveArgEvent = new Event_events_nonOptionalPrimitiveArgEvent(JS('', '#.nonOptionalPrimitiveArgEvent', this._jsObject));
+ optionalPrimitiveArgEvent = new Event_events_optionalPrimitiveArgEvent(JS('', '#.optionalPrimitiveArgEvent', this._jsObject));
+ nonOptionalDictArgEvent = new Event_events_nonOptionalDictArgEvent(JS('', '#.nonOptionalDictArgEvent', this._jsObject));
+ optionalDictArgEvent = new Event_events_optionalDictArgEvent(JS('', '#.optionalDictArgEvent', this._jsObject));
+ }
+}
diff --git a/tools/json_schema_compiler/dart_test/events.idl b/tools/json_schema_compiler/dart_test/events.idl
new file mode 100644
index 0000000..f1fb4b6
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/events.idl
@@ -0,0 +1,56 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This comment is for the events namespace.
+namespace events {
+ dictionary EventArgumentElement {
+ DOMString elementStringArg;
+ };
+
+ dictionary EventArgument {
+ // A file entry
+ [instanceOf=FileEntry] object entryArg;
+
+ // A string
+ DOMString stringArg;
+
+ // A primitive
+ int intArg;
+
+ // An array
+ EventArgumentElement[] elements;
+
+ // Optional file entry
+ [instanceOf=FileEntry] object? optionalEntryArg;
+
+ // A string
+ DOMString? optionalStringArg;
+
+ // A primitive
+ int? optionalIntArg;
+
+ // An array
+ EventArgumentElement[]? optionalElements;
+ };
+
+ interface Events {
+ // Documentation for the first basic event.
+ static void firstBasicEvent();
+
+ // Documentation for the second basic event.
+ static void secondBasicEvent();
+
+ // Documentation for an event with a non-optional primitive argument.
+ static void nonOptionalPrimitiveArgEvent(int argument);
+
+ // Documentation for an event with an optional primitive argument.
+ static void optionalPrimitiveArgEvent(optional int argument);
+
+ // Documentation for an event with a non-optional dictionary argument.
+ static void nonOptionalDictArgEvent(EventArgument argument);
+
+    // Documentation for an event with an optional dictionary argument.
+ static void optionalDictArgEvent(EventArgument argument);
+ };
+};
diff --git a/tools/json_schema_compiler/dart_test/functions.dart b/tools/json_schema_compiler/dart_test/functions.dart
new file mode 100644
index 0000000..cea71fd
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/functions.dart
@@ -0,0 +1,93 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Generated from namespace: functions
+
+part of chrome;
+
+/**
+ * Types
+ */
+
+class FunctionsDictType extends ChromeObject {
+ /*
+ * Private constructor
+ */
+ FunctionsDictType._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ /// A field.
+ int get a => JS('int', '#.a', this._jsObject);
+
+ void set a(int a) {
+ JS('void', '#.a = #', this._jsObject, a);
+ }
+
+
+ /*
+ * Methods
+ */
+ /// A parameter.
+ void voidFunc() => JS('void', '#.voidFunc()', this._jsObject);
+
+}
+
+/**
+ * Functions
+ */
+
+class API_functions {
+ /*
+ * API connection
+ */
+ Object _jsObject;
+
+ /*
+ * Functions
+ */
+ /// Simple function.
+ void voidFunc() => JS('void', '#.voidFunc()', this._jsObject);
+
+ /// Function taking a non-optional argument.
+ void argFunc(String s) => JS('void', '#.argFunc(#)', this._jsObject, s);
+
+ /// Function taking an optional argument.
+ void optionalArgFunc([String s]) => JS('void', '#.optionalArgFunc(#)', this._jsObject, s);
+
+ /// Function taking a non-optional dictionary argument.
+ void dictArgFunc(FunctionsDictType d) => JS('void', '#.dictArgFunc(#)', this._jsObject, convertArgument(d));
+
+ /// Function taking an optional dictionary argument.
+ void optionalDictArgFunc([FunctionsDictType d]) => JS('void', '#.optionalDictArgFunc(#)', this._jsObject, convertArgument(d));
+
+ /// Function taking an entry argument.
+  void entryArgFunc(FileEntry entry) => JS('void', '#.entryArgFunc(#)', this._jsObject, convertArgument(entry));
+
+ /// Function taking a simple callback.
+ void callbackFunc(void c()) => JS('void', '#.callbackFunc(#)', this._jsObject, convertDartClosureToJS(c, 0));
+
+ /// Function taking an optional simple callback.
+ void optionalCallbackFunc([void c()]) => JS('void', '#.optionalCallbackFunc(#)', this._jsObject, convertDartClosureToJS(c, 0));
+
+ /// Function taking a primitive callback.
+ void primitiveCallbackFunc(void c(int i)) => JS('void', '#.primitiveCallbackFunc(#)', this._jsObject, convertDartClosureToJS(c, 1));
+
+ /// Function taking a dictionary callback.
+ void dictCallbackFunc(void c(DictType dict)) {
+ void __proxy_callback(dict) {
+ if (c != null) {
+ c(new DictType._proxy(dict));
+ }
+ }
+ JS('void', '#.dictCallbackFunc(#)', this._jsObject, convertDartClosureToJS(__proxy_callback, 1));
+ }
+
+ /// Function returning a dictionary.
+ FunctionsDictType dictRetFunc() => new FunctionsDictType._proxy(JS('', '#.dictRetFunc()', this._jsObject));
+
+ API_functions(this._jsObject) {
+ }
+}
diff --git a/tools/json_schema_compiler/dart_test/functions.idl b/tools/json_schema_compiler/dart_test/functions.idl
new file mode 100644
index 0000000..e303d0d
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/functions.idl
@@ -0,0 +1,55 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// A comment for the functions namespace.
+namespace functions {
+ callback SimpleCallback = void ();
+
+ callback PrimitiveCallback = void (int i);
+
+ callback DictCallback = void ([instanceOf=DictType] object dict);
+
+ dictionary DictType {
+ // A field.
+ int a;
+
+ // A parameter.
+ static void voidFunc();
+ };
+
+ interface Functions {
+ // Simple function.
+ static void voidFunc();
+
+ // Function taking a non-optional argument.
+ static void argFunc(DOMString s);
+
+ // Function taking an optional argument.
+ static void optionalArgFunc(optional DOMString s);
+
+ // Function taking a non-optional dictionary argument.
+ static void dictArgFunc(DictType d);
+
+ // Function taking an optional dictionary argument.
+ static void optionalDictArgFunc(optional DictType d);
+
+ // Function taking an entry argument.
+    static void entryArgFunc([instanceOf=FileEntry] object entry);
+
+ // Function taking a simple callback.
+ static void callbackFunc(SimpleCallback c);
+
+ // Function taking an optional simple callback.
+ static void optionalCallbackFunc(optional SimpleCallback c);
+
+ // Function taking a primitive callback.
+ static void primitiveCallbackFunc(PrimitiveCallback c);
+
+ // Function taking a dictionary callback.
+ static void dictCallbackFunc(DictCallback c);
+
+ // Function returning a dictionary.
+ static DictType dictRetFunc();
+ };
+};
diff --git a/tools/json_schema_compiler/dart_test/operatable_type.dart b/tools/json_schema_compiler/dart_test/operatable_type.dart
new file mode 100644
index 0000000..2cc9dc8
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/operatable_type.dart
@@ -0,0 +1,94 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Generated from namespace: operatable_type
+
+part of chrome;
+
+/**
+ * Types
+ */
+
+class Operatable_typeDictType extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ Operatable_typeDictType({int x, int y}) {
+ if (x != null)
+ this.x = x;
+ if (y != null)
+ this.y = y;
+ }
+
+ /*
+ * Private constructor
+ */
+ Operatable_typeDictType._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ int get x => JS('int', '#.x', this._jsObject);
+
+ void set x(int x) {
+ JS('void', '#.x = #', this._jsObject, x);
+ }
+
+ int get y => JS('int', '#.y', this._jsObject);
+
+ void set y(int y) {
+ JS('void', '#.y = #', this._jsObject, y);
+ }
+
+}
+
+class Operatable_typeOperatableType extends ChromeObject {
+ /*
+ * Private constructor
+ */
+ Operatable_typeOperatableType._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ /// Documentation for the String t.
+ String get t => JS('String', '#.t', this._jsObject);
+
+ void set t(String t) {
+ JS('void', '#.t = #', this._jsObject, t);
+ }
+
+
+ /*
+ * Methods
+ */
+ /// Function returning nothing, taking nothing.
+ void voidFunc() => JS('void', '#.voidFunc()', this._jsObject);
+
+ /// Function returning primitive type.
+ int intRetFunc() => new int._proxy(JS('', '#.intRetFunc()', this._jsObject));
+
+ /// Function returning dictionary.
+ Operatable_typeDictType dictRetFunc() => new Operatable_typeDictType._proxy(JS('', '#.dictRetFunc()', this._jsObject));
+
+ /// Function taking primitive type.
+ void intArgFunc(int i) => JS('void', '#.intArgFunc(#)', this._jsObject, i);
+
+ /// Function taking dict type.
+ void dictArgFunc(Operatable_typeDictType d) => JS('void', '#.dictArgFunc(#)', this._jsObject, convertArgument(d));
+
+}
+
+/**
+ * Functions
+ */
+
+class API_operatable_type {
+ /*
+ * API connection
+ */
+ Object _jsObject;
+ API_operatable_type(this._jsObject) {
+ }
+}
diff --git a/tools/json_schema_compiler/dart_test/operatable_type.idl b/tools/json_schema_compiler/dart_test/operatable_type.idl
new file mode 100644
index 0000000..9c5f53c
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/operatable_type.idl
@@ -0,0 +1,32 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Top-level namespace-comment for operatableType
+namespace operatable_type {
+ dictionary DictType {
+ int x;
+ int y;
+ };
+
+ // Documentation for OperatableType.
+ dictionary OperatableType {
+ // Documentation for the String t.
+ DOMString t;
+
+ // Function returning nothing, taking nothing.
+ static void voidFunc();
+
+ // Function returning primitive type.
+ static int intRetFunc();
+
+ // Function returning dictionary.
+ static DictType dictRetFunc();
+
+ // Function taking primitive type.
+ static void intArgFunc(int i);
+
+ // Function taking dict type.
+ static void dictArgFunc(DictType d);
+ };
+};
diff --git a/tools/json_schema_compiler/dart_test/tags.dart b/tools/json_schema_compiler/dart_test/tags.dart
new file mode 100644
index 0000000..e4adfd8
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/tags.dart
@@ -0,0 +1,116 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Generated from namespace: tags
+
+part of chrome;
+
+/**
+ * Types
+ */
+
+class TagsInlineDoc extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ TagsInlineDoc({}) {
+ }
+
+ /*
+ * Private constructor
+ */
+ TagsInlineDoc._proxy(_jsObject) : super._proxy(_jsObject);
+}
+
+class TagsNodoc extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ TagsNodoc({}) {
+ }
+
+ /*
+ * Private constructor
+ */
+ TagsNodoc._proxy(_jsObject) : super._proxy(_jsObject);
+}
+
+class TagsNocompile extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ TagsNocompile({}) {
+ }
+
+ /*
+ * Private constructor
+ */
+ TagsNocompile._proxy(_jsObject) : super._proxy(_jsObject);
+}
+
+class TagsPlainDict extends ChromeObject {
+ /*
+ * Public constructor
+ */
+ TagsPlainDict({int inline_doc, String nodoc, double nocompile, fileEntry instance_of_tag}) {
+ if (inline_doc != null)
+ this.inline_doc = inline_doc;
+ if (nodoc != null)
+ this.nodoc = nodoc;
+ if (nocompile != null)
+ this.nocompile = nocompile;
+ if (instance_of_tag != null)
+ this.instance_of_tag = instance_of_tag;
+ }
+
+ /*
+ * Private constructor
+ */
+ TagsPlainDict._proxy(_jsObject) : super._proxy(_jsObject);
+
+ /*
+ * Public accessors
+ */
+ /// This int has the property [inline_doc].
+ int get inline_doc => JS('int', '#.inline_doc', this._jsObject);
+
+ void set inline_doc(int inline_doc) {
+ JS('void', '#.inline_doc = #', this._jsObject, inline_doc);
+ }
+
+ /// This String has the property [nodoc].
+ String get nodoc => JS('String', '#.nodoc', this._jsObject);
+
+ void set nodoc(String nodoc) {
+ JS('void', '#.nodoc = #', this._jsObject, nodoc);
+ }
+
+ /// This double has the property [nocompile].
+ double get nocompile => JS('double', '#.nocompile', this._jsObject);
+
+ void set nocompile(double nocompile) {
+ JS('void', '#.nocompile = #', this._jsObject, nocompile);
+ }
+
+ /// This object has the property [instanceOf=fileEntry].
+ fileEntry get instance_of_tag => JS('fileEntry', '#.instance_of_tag', this._jsObject);
+
+ void set instance_of_tag(fileEntry instance_of_tag) {
+ JS('void', '#.instance_of_tag = #', this._jsObject, convertArgument(instance_of_tag));
+ }
+
+}
+
+/**
+ * Functions
+ */
+
+class API_tags {
+ /*
+ * API connection
+ */
+ Object _jsObject;
+ API_tags(this._jsObject) {
+ }
+}
diff --git a/tools/json_schema_compiler/dart_test/tags.idl b/tools/json_schema_compiler/dart_test/tags.idl
new file mode 100644
index 0000000..7a029cd
--- /dev/null
+++ b/tools/json_schema_compiler/dart_test/tags.idl
@@ -0,0 +1,33 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// A comment describing tags.
+namespace tags {
+ // This dictionary has the property [inline_doc].
+ [inline_doc] dictionary InlineDoc {
+ };
+
+ // This dictionary has the property [nodoc].
+ [nodoc] dictionary Nodoc {
+ };
+
+ // This dictionary has the property [nocompile].
+ [nocompile] dictionary Nocompile {
+ };
+
+ // This dictionary has no tags on the dictionary itself.
+ dictionary PlainDict {
+ // This int has the property [inline_doc].
+ [inline_doc] int inline_doc;
+
+ // This String has the property [nodoc].
+ [nodoc] String nodoc;
+
+ // This double has the property [nocompile].
+ [nocompile] double nocompile;
+
+ // This object has the property [instanceOf=fileEntry].
+ [instanceOf=fileEntry] object instance_of_tag;
+ };
+};
diff --git a/tools/json_schema_compiler/features_cc_generator.py b/tools/json_schema_compiler/features_cc_generator.py
new file mode 100644
index 0000000..d3b3717
--- /dev/null
+++ b/tools/json_schema_compiler/features_cc_generator.py
@@ -0,0 +1,95 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+
+from code import Code
+import cpp_util
+
+
+class CCGenerator(object):
+ def Generate(self, feature_defs, source_file, namespace):
+ return _Generator(feature_defs, source_file, namespace).Generate()
+
+
+class _Generator(object):
+ """A .cc generator for features.
+ """
+ def __init__(self, feature_defs, source_file, namespace):
+ self._feature_defs = feature_defs
+ self._source_file = source_file
+ self._source_file_filename, _ = os.path.splitext(source_file)
+ self._class_name = cpp_util.ClassName(self._source_file_filename)
+ self._namespace = namespace
+
+ def Generate(self):
+ """Generates a Code object for features.
+ """
+ c = Code()
+ (c.Append(cpp_util.CHROMIUM_LICENSE)
+ .Append()
+ .Append(cpp_util.GENERATED_FEATURE_MESSAGE % self._source_file)
+ .Append()
+ .Append('#include <string>')
+ .Append()
+ .Append('#include "%s.h"' % self._source_file_filename)
+ .Append()
+ .Append('#include "base/logging.h"')
+ .Append()
+ .Concat(cpp_util.OpenNamespace(self._namespace))
+ .Append()
+ )
+
+ # Generate the constructor.
+ (c.Append('%s::%s() {' % (self._class_name, self._class_name))
+ .Sblock()
+ )
+ for feature in self._feature_defs:
+ c.Append('features_["%s"] = %s;'
+ % (feature.name, cpp_util.ConstantName(feature.name)))
+ (c.Eblock()
+ .Append('}')
+ .Append()
+ )
+
+ # Generate the ToString function.
+ (c.Append('const char* %s::ToString('
+ '%s::ID id) const {' % (self._class_name, self._class_name))
+ .Sblock()
+ .Append('switch (id) {')
+ .Sblock()
+ )
+ for feature in self._feature_defs:
+ c.Append('case %s: return "%s";' %
+ (cpp_util.ConstantName(feature.name), feature.name))
+ (c.Append('case kUnknown: break;')
+ .Append('case kEnumBoundary: break;')
+ .Eblock()
+ .Append('}')
+ .Append('NOTREACHED();')
+ .Append('return "";')
+ )
+ (c.Eblock()
+ .Append('}')
+ .Append()
+ )
+
+ # Generate the FromString function.
+
+ (c.Append('%s::ID %s::FromString('
+ 'const std::string& id) const {'
+ % (self._class_name, self._class_name))
+ .Sblock()
+ .Append('std::map<std::string, %s::ID>::const_iterator it'
+ ' = features_.find(id);' % self._class_name)
+ .Append('if (it == features_.end())')
+ .Append(' return kUnknown;')
+ .Append('return it->second;')
+ .Eblock()
+ .Append('}')
+ .Append()
+ .Cblock(cpp_util.CloseNamespace(self._namespace))
+ )
+
+ return c
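A minimal sketch of rendering a features .cc body by hand; FakeFeature is a stand-in for the objects model.CreateFeature produces and only carries the .name attribute this generator reads, and the file path and namespace are placeholders:

    import collections
    from features_cc_generator import CCGenerator

    FakeFeature = collections.namedtuple('FakeFeature', ['name'])

    features = [FakeFeature('activeTab'), FakeFeature('alarms')]
    cc_code = CCGenerator().Generate(
        features, 'chrome/common/_api_features.json', 'extensions').Render()
    print cc_code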
diff --git a/tools/json_schema_compiler/features_compiler.py b/tools/json_schema_compiler/features_compiler.py
new file mode 100755
index 0000000..1e4e81a
--- /dev/null
+++ b/tools/json_schema_compiler/features_compiler.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generator for C++ features from json files.
+
+Usage example:
+ features_compiler.py --destdir gen --root /home/Work/src _permissions.json
+"""
+
+import optparse
+import os
+
+from schema_loader import SchemaLoader
+from features_cc_generator import CCGenerator
+from features_h_generator import HGenerator
+from model import CreateFeature
+
+
+def _GenerateSchema(filename, root, destdir, namespace):
+ """Generates C++ features files from the json file |filename|.
+ """
+ # Load in the feature permissions from the JSON file.
+ schema = os.path.normpath(filename)
+ schema_loader = SchemaLoader(os.path.dirname(os.path.relpath(schema, root)),
+ os.path.dirname(schema))
+ schema_filename = os.path.splitext(schema)[0]
+ feature_defs = schema_loader.LoadSchema(schema)
+
+  # Build the list of feature models defined in the schema.
+ feature_list = []
+ for feature_def, feature in feature_defs.iteritems():
+ feature_list.append(CreateFeature(feature_def, feature))
+
+ source_file_dir, _ = os.path.split(schema)
+ relpath = os.path.relpath(os.path.normpath(source_file_dir), root)
+ full_path = os.path.join(relpath, schema)
+
+ generators = [
+ ('%s.cc' % schema_filename, CCGenerator()),
+ ('%s.h' % schema_filename, HGenerator())
+ ]
+
+ # Generate and output the code for all features.
+ output_code = []
+ for filename, generator in generators:
+ code = generator.Generate(feature_list, full_path, namespace).Render()
+ if destdir:
+ with open(os.path.join(destdir, relpath, filename), 'w') as f:
+ f.write(code)
+ output_code += [filename, '', code, '']
+
+ return '\n'.join(output_code)
+
+
+if __name__ == '__main__':
+ parser = optparse.OptionParser(
+ description='Generates a C++ features model from JSON schema',
+ usage='usage: %prog [option]... schema')
+ parser.add_option('-r', '--root', default='.',
+ help='logical include root directory. Path to schema files from '
+ 'specified dir will be the include path.')
+ parser.add_option('-d', '--destdir',
+ help='root directory to output generated files.')
+ parser.add_option('-n', '--namespace', default='generated_features',
+                    help='C++ namespace for generated files, e.g. extensions::api.')
+ (opts, filenames) = parser.parse_args()
+
+  # Only one input file is currently supported.
+ if len(filenames) != 1:
+ raise ValueError('One (and only one) file is required (for now).')
+
+ result = _GenerateSchema(filenames[0], opts.root, opts.destdir,
+ opts.namespace)
+ if not opts.destdir:
+ print result
diff --git a/tools/json_schema_compiler/features_h_generator.py b/tools/json_schema_compiler/features_h_generator.py
new file mode 100644
index 0000000..4198bb4
--- /dev/null
+++ b/tools/json_schema_compiler/features_h_generator.py
@@ -0,0 +1,99 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+
+from code import Code
+import cpp_util
+
+
+class HGenerator(object):
+ def Generate(self, features, source_file, namespace):
+ return _Generator(features, source_file, namespace).Generate()
+
+
+class _Generator(object):
+ """A .cc generator for features.
+ """
+ def __init__(self, features, source_file, namespace):
+ self._feature_defs = features
+ self._source_file = source_file
+ self._source_file_filename, _ = os.path.splitext(source_file)
+ self._class_name = cpp_util.ClassName(self._source_file_filename)
+ self._namespace = namespace
+
+ def Generate(self):
+ """Generates a Code object for features.
+ """
+ c = Code()
+ (c.Append(cpp_util.CHROMIUM_LICENSE)
+ .Append()
+ .Append(cpp_util.GENERATED_FEATURE_MESSAGE % self._source_file)
+ .Append()
+ )
+
+ # Hack: for the purpose of gyp the header file will always be the source
+ # file with its file extension replaced by '.h'. Assume so.
+    output_file = os.path.splitext(self._source_file)[0] + '.h'
+ ifndef_name = cpp_util.GenerateIfndefName(output_file)
+
+ (c.Append('#ifndef %s' % ifndef_name)
+ .Append('#define %s' % ifndef_name)
+ .Append()
+ )
+
+ (c.Append('#include <map>')
+ .Append('#include <string>')
+ .Append()
+ .Concat(cpp_util.OpenNamespace(self._namespace))
+ .Append()
+ )
+
+ (c.Append('class %s {' % self._class_name)
+ .Append(' public:')
+ .Sblock()
+ .Concat(self._GeneratePublicBody())
+ .Eblock()
+ .Append(' private:')
+ .Sblock()
+ .Concat(self._GeneratePrivateBody())
+ .Eblock('};')
+ .Append()
+ .Cblock(cpp_util.CloseNamespace(self._namespace))
+ )
+ (c.Append('#endif // %s' % ifndef_name)
+ .Append()
+ )
+ return c
+
+ def _GeneratePublicBody(self):
+ c = Code()
+
+ (c.Append('%s();' % self._class_name)
+ .Append()
+ .Append('enum ID {')
+ .Concat(self._GenerateEnumConstants())
+ .Eblock('};')
+ .Append()
+ .Append('const char* ToString(ID id) const;')
+ .Append('ID FromString(const std::string& id) const;')
+ .Append()
+ )
+ return c
+
+ def _GeneratePrivateBody(self):
+ return Code().Append('std::map<std::string, '
+ '%s::ID> features_;' % self._class_name)
+
+ def _GenerateEnumConstants(self):
+ c = Code()
+
+ (c.Sblock()
+ .Append('kUnknown,')
+ )
+ for feature in self._feature_defs:
+ c.Append('%s,' % cpp_util.ConstantName(feature.name))
+ c.Append('kEnumBoundary')
+
+ return c
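The enum emitted by _GenerateEnumConstants always brackets the per-feature constants with kUnknown and kEnumBoundary, which is what lets FromString fall back to kUnknown and gives callers an iteration bound. A standalone sketch of that layout; the constant_name default only approximates cpp_util.ConstantName and is an assumption:

    def enum_lines(feature_names,
                   constant_name=lambda n: 'k' + n[0].upper() + n[1:]):
      # kUnknown first, one constant per feature, kEnumBoundary as sentinel.
      lines = ['kUnknown,']
      lines.extend('%s,' % constant_name(n) for n in feature_names)
      lines.append('kEnumBoundary')
      return lines

    assert enum_lines(['activeTab']) == ['kUnknown,', 'kActiveTab,', 'kEnumBoundary']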
diff --git a/tools/json_schema_compiler/h_generator.py b/tools/json_schema_compiler/h_generator.py
new file mode 100644
index 0000000..a6d67db
--- /dev/null
+++ b/tools/json_schema_compiler/h_generator.py
@@ -0,0 +1,397 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from code import Code
+from model import PropertyType
+import cpp_util
+import schema_util
+
+class HGenerator(object):
+ def __init__(self, type_generator):
+ self._type_generator = type_generator
+
+ def Generate(self, namespace):
+ return _Generator(namespace, self._type_generator).Generate()
+
+
+class _Generator(object):
+ """A .h generator for a namespace.
+ """
+ def __init__(self, namespace, cpp_type_generator):
+ self._namespace = namespace
+ self._type_helper = cpp_type_generator
+ self._generate_error_messages = namespace.compiler_options.get(
+ 'generate_error_messages', False)
+
+ def Generate(self):
+ """Generates a Code object with the .h for a single namespace.
+ """
+ c = Code()
+ (c.Append(cpp_util.CHROMIUM_LICENSE)
+ .Append()
+ .Append(cpp_util.GENERATED_FILE_MESSAGE % self._namespace.source_file)
+ .Append()
+ )
+
+ # Hack: for the purpose of gyp the header file will always be the source
+ # file with its file extension replaced by '.h'. Assume so.
+ output_file = os.path.splitext(self._namespace.source_file)[0] + '.h'
+ ifndef_name = cpp_util.GenerateIfndefName(output_file)
+
+ (c.Append('#ifndef %s' % ifndef_name)
+ .Append('#define %s' % ifndef_name)
+ .Append()
+ .Append('#include <map>')
+ .Append('#include <string>')
+ .Append('#include <vector>')
+ .Append()
+ .Append('#include "base/basictypes.h"')
+ .Append('#include "base/logging.h"')
+ .Append('#include "base/memory/linked_ptr.h"')
+ .Append('#include "base/memory/scoped_ptr.h"')
+ .Append('#include "base/values.h"')
+ .Cblock(self._type_helper.GenerateIncludes())
+ .Append()
+ )
+
+ # TODO(calamity): These forward declarations should be #includes to allow
+ # $ref types from other files to be used as required params. This requires
+ # some detangling of windows and tabs which will currently lead to circular
+ # #includes.
+ c.Cblock(self._type_helper.GenerateForwardDeclarations())
+
+ cpp_namespace = cpp_util.GetCppNamespace(
+ self._namespace.environment.namespace_pattern,
+ self._namespace.unix_name)
+ c.Concat(cpp_util.OpenNamespace(cpp_namespace))
+ c.Append()
+ if self._namespace.properties:
+ (c.Append('//')
+ .Append('// Properties')
+ .Append('//')
+ .Append()
+ )
+ for property in self._namespace.properties.values():
+ property_code = self._type_helper.GeneratePropertyValues(
+ property,
+ 'extern const %(type)s %(name)s;')
+ if property_code:
+ c.Cblock(property_code)
+ if self._namespace.types:
+ (c.Append('//')
+ .Append('// Types')
+ .Append('//')
+ .Append()
+ .Cblock(self._GenerateTypes(self._FieldDependencyOrder(),
+ is_toplevel=True,
+ generate_typedefs=True))
+ )
+ if self._namespace.functions:
+ (c.Append('//')
+ .Append('// Functions')
+ .Append('//')
+ .Append()
+ )
+ for function in self._namespace.functions.values():
+ c.Cblock(self._GenerateFunction(function))
+ if self._namespace.events:
+ (c.Append('//')
+ .Append('// Events')
+ .Append('//')
+ .Append()
+ )
+ for event in self._namespace.events.values():
+ c.Cblock(self._GenerateEvent(event))
+ (c.Concat(cpp_util.CloseNamespace(cpp_namespace))
+ .Append('#endif // %s' % ifndef_name)
+ .Append()
+ )
+ return c
+
+ def _FieldDependencyOrder(self):
+ """Generates the list of types in the current namespace in an order in which
+ depended-upon types appear before types which depend on them.
+ """
+ dependency_order = []
+
+ def ExpandType(path, type_):
+ if type_ in path:
+ raise ValueError("Illegal circular dependency via cycle " +
+ ", ".join(map(lambda x: x.name, path + [type_])))
+ for prop in type_.properties.values():
+ if (prop.type_ == PropertyType.REF and
+ schema_util.GetNamespace(prop.ref_type) == self._namespace.name):
+ ExpandType(path + [type_], self._namespace.types[prop.ref_type])
+ if not type_ in dependency_order:
+ dependency_order.append(type_)
+
+ for type_ in self._namespace.types.values():
+ ExpandType([], type_)
+ return dependency_order
+
+ def _GenerateEnumDeclaration(self, enum_name, type_):
+ """Generate a code object with the declaration of a C++ enum.
+ """
+ c = Code()
+ c.Sblock('enum %s {' % enum_name)
+ c.Append(self._type_helper.GetEnumNoneValue(type_) + ',')
+ for value in type_.enum_values:
+ current_enum_string = self._type_helper.GetEnumValue(type_, value)
+ c.Append(current_enum_string + ',')
+ c.Append('%s = %s,' % (
+ self._type_helper.GetEnumLastValue(type_), current_enum_string))
+ c.Eblock('};')
+ return c
+
+ def _GenerateFields(self, props):
+ """Generates the field declarations when declaring a type.
+ """
+ c = Code()
+ needs_blank_line = False
+ for prop in props:
+ if needs_blank_line:
+ c.Append()
+ needs_blank_line = True
+ if prop.description:
+ c.Comment(prop.description)
+ # ANY is a base::Value which is abstract and cannot be a direct member, so
+ # we always need to wrap it in a scoped_ptr.
+ is_ptr = prop.optional or prop.type_.property_type == PropertyType.ANY
+ (c.Append('%s %s;' % (
+ self._type_helper.GetCppType(prop.type_, is_ptr=is_ptr),
+ prop.unix_name))
+ )
+ return c
+
+ def _GenerateType(self, type_, is_toplevel=False, generate_typedefs=False):
+ """Generates a struct for |type_|.
+
+ |is_toplevel| implies that the type was declared in the "types" field
+ of an API schema. This determines the correct function
+ modifier(s).
+ |generate_typedefs| controls whether primitive types should be generated as
+ a typedef. This may not always be desired. If false,
+ primitive types are ignored.
+ """
+ classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
+ c = Code()
+
+ if type_.functions:
+ # Wrap functions within types in the type's namespace.
+ (c.Append('namespace %s {' % classname)
+ .Append()
+ )
+ for function in type_.functions.values():
+ c.Cblock(self._GenerateFunction(function))
+ c.Append('} // namespace %s' % classname)
+ elif type_.property_type == PropertyType.ARRAY:
+ if generate_typedefs and type_.description:
+ c.Comment(type_.description)
+ c.Cblock(self._GenerateType(type_.item_type))
+ if generate_typedefs:
+ (c.Append('typedef std::vector<%s > %s;' % (
+ self._type_helper.GetCppType(type_.item_type),
+ classname))
+ )
+ elif type_.property_type == PropertyType.STRING:
+ if generate_typedefs:
+ if type_.description:
+ c.Comment(type_.description)
+ c.Append('typedef std::string %(classname)s;')
+ elif type_.property_type == PropertyType.ENUM:
+ if type_.description:
+ c.Comment(type_.description)
+      c.Cblock(self._GenerateEnumDeclaration(classname, type_))
+      # Top-level enums are in namespace scope, so their ToString/Parse
+      # methods shouldn't be static. Enums declared inline (e.g. in an object)
+      # should be static.
+ maybe_static = '' if is_toplevel else 'static '
+ (c.Append()
+ .Append('%sstd::string ToString(%s as_enum);' %
+ (maybe_static, classname))
+ .Append('%s%s Parse%s(const std::string& as_string);' %
+ (maybe_static, classname, classname))
+ )
+ elif type_.property_type in (PropertyType.CHOICES,
+ PropertyType.OBJECT):
+ if type_.description:
+ c.Comment(type_.description)
+ (c.Sblock('struct %(classname)s {')
+ .Append('%(classname)s();')
+ .Append('~%(classname)s();')
+ )
+ if type_.origin.from_json:
+ (c.Append()
+ .Comment('Populates a %s object from a base::Value. Returns'
+ ' whether |out| was successfully populated.' % classname)
+ .Append('static bool Populate(%s);' % self._GenerateParams(
+ ('const base::Value& value', '%s* out' % classname)))
+ )
+ if is_toplevel:
+ (c.Append()
+ .Comment('Creates a %s object from a base::Value, or NULL on '
+ 'failure.' % classname)
+ .Append('static scoped_ptr<%s> FromValue(%s);' % (
+ classname, self._GenerateParams(('const base::Value& value',))))
+ )
+ if type_.origin.from_client:
+ value_type = ('base::Value'
+ if type_.property_type is PropertyType.CHOICES else
+ 'base::DictionaryValue')
+ (c.Append()
+ .Comment('Returns a new %s representing the serialized form of this '
+ '%s object.' % (value_type, classname))
+ .Append('scoped_ptr<%s> ToValue() const;' % value_type)
+ )
+ if type_.property_type == PropertyType.CHOICES:
+ # Choices are modelled with optional fields for each choice. Exactly one
+ # field of the choice is guaranteed to be set by the compiler.
+ c.Cblock(self._GenerateTypes(type_.choices))
+ c.Append('// Choices:')
+ for choice_type in type_.choices:
+ c.Append('%s as_%s;' % (
+ self._type_helper.GetCppType(choice_type, is_ptr=True),
+ choice_type.unix_name))
+ else:
+ properties = type_.properties.values()
+ (c.Append()
+ .Cblock(self._GenerateTypes(p.type_ for p in properties))
+ .Cblock(self._GenerateFields(properties)))
+ if type_.additional_properties is not None:
+ # Most additionalProperties actually have type "any", which is better
+ # modelled as a DictionaryValue rather than a map of string -> Value.
+ if type_.additional_properties.property_type == PropertyType.ANY:
+ c.Append('base::DictionaryValue additional_properties;')
+ else:
+ (c.Cblock(self._GenerateType(type_.additional_properties))
+ .Append('std::map<std::string, %s> additional_properties;' %
+ cpp_util.PadForGenerics(
+ self._type_helper.GetCppType(type_.additional_properties,
+ is_in_container=True)))
+ )
+ (c.Eblock()
+ .Append()
+ .Sblock(' private:')
+ .Append('DISALLOW_COPY_AND_ASSIGN(%(classname)s);')
+ .Eblock('};')
+ )
+ return c.Substitute({'classname': classname})
+
+ def _GenerateEvent(self, event):
+ """Generates the namespaces for an event.
+ """
+ c = Code()
+ # TODO(kalman): use event.unix_name not Classname.
+ event_namespace = cpp_util.Classname(event.name)
+ (c.Append('namespace %s {' % event_namespace)
+ .Append()
+ .Concat(self._GenerateEventNameConstant(event))
+ .Concat(self._GenerateCreateCallbackArguments(event))
+ .Eblock('} // namespace %s' % event_namespace)
+ )
+ return c
+
+ def _GenerateFunction(self, function):
+ """Generates the namespaces and structs for a function.
+ """
+ c = Code()
+ # TODO(kalman): Use function.unix_name not Classname here.
+ function_namespace = cpp_util.Classname(function.name)
+ # Windows has a #define for SendMessage, so to avoid any issues, we need
+ # to not use the name.
+ if function_namespace == 'SendMessage':
+ function_namespace = 'PassMessage'
+ (c.Append('namespace %s {' % function_namespace)
+ .Append()
+ .Cblock(self._GenerateFunctionParams(function))
+ )
+ if function.callback:
+ c.Cblock(self._GenerateFunctionResults(function.callback))
+ c.Append('} // namespace %s' % function_namespace)
+ return c
+
+ def _GenerateFunctionParams(self, function):
+ """Generates the struct for passing parameters from JSON to a function.
+ """
+ if not function.params:
+ return Code()
+
+ c = Code()
+ (c.Sblock('struct Params {')
+ .Append('static scoped_ptr<Params> Create(%s);' % self._GenerateParams(
+ ('const base::ListValue& args',)))
+ .Append('~Params();')
+ .Append()
+ .Cblock(self._GenerateTypes(p.type_ for p in function.params))
+ .Cblock(self._GenerateFields(function.params))
+ .Eblock()
+ .Append()
+ .Sblock(' private:')
+ .Append('Params();')
+ .Append()
+ .Append('DISALLOW_COPY_AND_ASSIGN(Params);')
+ .Eblock('};')
+ )
+ return c
+
+ def _GenerateTypes(self, types, is_toplevel=False, generate_typedefs=False):
+ """Generate the structures required by a property such as OBJECT classes
+ and enums.
+ """
+ c = Code()
+ for type_ in types:
+ c.Cblock(self._GenerateType(type_,
+ is_toplevel=is_toplevel,
+ generate_typedefs=generate_typedefs))
+ return c
+
+ def _GenerateCreateCallbackArguments(self, function):
+ """Generates functions for passing parameters to a callback.
+ """
+ c = Code()
+ params = function.params
+ c.Cblock(self._GenerateTypes((p.type_ for p in params), is_toplevel=True))
+
+ declaration_list = []
+ for param in params:
+ if param.description:
+ c.Comment(param.description)
+ declaration_list.append(cpp_util.GetParameterDeclaration(
+ param, self._type_helper.GetCppType(param.type_)))
+ c.Append('scoped_ptr<base::ListValue> Create(%s);' %
+ ', '.join(declaration_list))
+ return c
+
+ def _GenerateEventNameConstant(self, event):
+ """Generates a constant string array for the event name.
+ """
+ c = Code()
+ c.Append('extern const char kEventName[]; // "%s.%s"' % (
+ self._namespace.name, event.name))
+ c.Append()
+ return c
+
+ def _GenerateFunctionResults(self, callback):
+ """Generates namespace for passing a function's result back.
+ """
+ c = Code()
+ (c.Append('namespace Results {')
+ .Append()
+ .Concat(self._GenerateCreateCallbackArguments(callback))
+ .Append('} // namespace Results')
+ )
+ return c
+
+ def _GenerateParams(self, params):
+ """Builds the parameter list for a function, given an array of parameters.
+ """
+ # |error| is populated with warnings and/or errors found during parsing.
+    # A populated |error| does not necessarily imply failure; the condition may
+    # be recoverable. For example, an optional property may have failed to
+    # parse while the parser was still able to continue.
+ if self._generate_error_messages:
+ params += ('base::string16* error',)
+ return ', '.join(str(p) for p in params)
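+
+  # Illustrative example (not part of the original file): when
+  # |self._generate_error_messages| is set, the Params::Create declaration
+  # generated by _GenerateFunctionParams above expands to something like
+  #   static scoped_ptr<Params> Create(const base::ListValue& args,
+  #                                    base::string16* error);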
diff --git a/tools/json_schema_compiler/highlighters/__init__.py b/tools/json_schema_compiler/highlighters/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/json_schema_compiler/highlighters/__init__.py
diff --git a/tools/json_schema_compiler/highlighters/hilite_me_highlighter.py b/tools/json_schema_compiler/highlighters/hilite_me_highlighter.py
new file mode 100644
index 0000000..af04847
--- /dev/null
+++ b/tools/json_schema_compiler/highlighters/hilite_me_highlighter.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import urllib
+import urllib2
+
+class HiliteMeHighlighter(object):
+ """Highlighter that calls the http://hilite.me API to highlight code.
+ """
+ def GetCSS(self, style):
+ return ''
+
+ def GetCodeElement(self, code, style):
+ # Call hilite.me API to do syntax highlighting
+ return urllib2.urlopen('http://hilite.me/api',
+ urllib.urlencode([
+ ('code', code),
+ ('lexer', 'cpp'),
+ ('style', style),
+ ('linenos', 1)])
+ ).read()
+
+ def DisplayName(self):
+ return 'hilite.me (slow, requires internet)'
+
+ def GetStyles(self):
+ return ['monokai', 'manni', 'perldoc', 'borland', 'colorful', 'default',
+ 'murphy', 'vs', 'trac', 'tango', 'fruity', 'autumn', 'bw', 'emacs',
+ 'vim', 'pastie', 'friendly', 'native']
diff --git a/tools/json_schema_compiler/highlighters/none_highlighter.py b/tools/json_schema_compiler/highlighters/none_highlighter.py
new file mode 100644
index 0000000..ac1cc2b
--- /dev/null
+++ b/tools/json_schema_compiler/highlighters/none_highlighter.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import cgi
+
+class NoneHighlighter(object):
+ """Highlighter that just wraps code in a <pre>.
+ """
+ def GetCSS(self, style):
+ return ''
+
+ def GetCodeElement(self, code, style):
+ return '<pre>' + cgi.escape(code) + '</pre>'
+
+ def DisplayName(self):
+ return 'none'
+
+ def GetStyles(self):
+ return []
diff --git a/tools/json_schema_compiler/highlighters/pygments_highlighter.py b/tools/json_schema_compiler/highlighters/pygments_highlighter.py
new file mode 100644
index 0000000..06abd33
--- /dev/null
+++ b/tools/json_schema_compiler/highlighters/pygments_highlighter.py
@@ -0,0 +1,37 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+try:
+ import pygments
+ from pygments.lexers import CppLexer
+ from pygments.formatters import HtmlFormatter
+ PYGMENTS_IMPORTED = True
+except ImportError:
+  print('It appears that Pygments is not installed. It can be installed with '
+        '"easy_install Pygments" or from http://pygments.org.')
+ PYGMENTS_IMPORTED = False
+
+class PygmentsHighlighter(object):
+  """Highlighter that uses the python pygments library to highlight code.
+  """
+  def __init__(self):
+    if not PYGMENTS_IMPORTED:
+      raise ImportError('Pygments not installed')
+
+ def GetCSS(self, style):
+ formatter = HtmlFormatter(linenos=True,
+ style=pygments.styles.get_style_by_name(style))
+ return formatter.get_style_defs('.highlight')
+
+ def GetCodeElement(self, code, style):
+ formatter = HtmlFormatter(linenos=True,
+ style=pygments.styles.get_style_by_name(style))
+ return pygments.highlight(code, CppLexer(), formatter)
+
+ def DisplayName(self):
+ return 'pygments' + ('' if PYGMENTS_IMPORTED else ' (not installed)')
+
+ def GetStyles(self):
+ return list(pygments.styles.get_all_styles())
diff --git a/tools/json_schema_compiler/idl_schema.py b/tools/json_schema_compiler/idl_schema.py
new file mode 100755
index 0000000..a913756
--- /dev/null
+++ b/tools/json_schema_compiler/idl_schema.py
@@ -0,0 +1,488 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import itertools
+import json
+import os.path
+import re
+import sys
+
+from json_parse import OrderedDict
+
+# This file is a peer to json_schema.py. Each of these files understands a
+# certain format describing APIs (either JSON or IDL), reads files written
+# in that format into memory, and emits them as a Python array of objects
+# corresponding to those APIs, where the objects are formatted in a way that
+# the JSON schema compiler understands. compiler.py drives both idl_schema.py
+# and json_schema.py.
+
+# idl_parser expects to be able to import certain files in its directory,
+# so let's set things up the way it wants.
+_idl_generators_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ os.pardir, os.pardir, 'ppapi', 'generators')
+if _idl_generators_path in sys.path:
+ import idl_parser
+else:
+ sys.path.insert(0, _idl_generators_path)
+ try:
+ import idl_parser
+ finally:
+ sys.path.pop(0)
+
+def ProcessComment(comment):
+ '''
+ Convert a comment into a parent comment and a list of parameter comments.
+
+ Function comments are of the form:
+ Function documentation. May contain HTML and multiple lines.
+
+ |arg1_name|: Description of arg1. Use <var>argument</var> to refer
+ to other arguments.
+ |arg2_name|: Description of arg2...
+
+ Newlines are removed, and leading and trailing whitespace is stripped.
+
+ Args:
+ comment: The string from a Comment node.
+
+ Returns: A tuple that looks like:
+ (
+ "The processed comment, minus all |parameter| mentions.",
+ {
+ 'parameter_name_1': "The comment that followed |parameter_name_1|:",
+ ...
+ }
+ )
+ '''
+ def add_paragraphs(content):
+ paragraphs = content.split('\n\n')
+ if len(paragraphs) < 2:
+ return content
+ return '<p>' + '</p><p>'.join(p.strip() for p in paragraphs) + '</p>'
+
+ # Find all the parameter comments of the form '|name|: comment'.
+ parameter_starts = list(re.finditer(r' *\|([^|]*)\| *: *', comment))
+
+  # Get the parent comment (everything before the first parameter comment).
+ first_parameter_location = (parameter_starts[0].start()
+ if parameter_starts else len(comment))
+ parent_comment = (add_paragraphs(comment[:first_parameter_location].strip())
+ .replace('\n', ''))
+
+ params = OrderedDict()
+ for (cur_param, next_param) in itertools.izip_longest(parameter_starts,
+ parameter_starts[1:]):
+ param_name = cur_param.group(1)
+
+ # A parameter's comment goes from the end of its introduction to the
+ # beginning of the next parameter's introduction.
+ param_comment_start = cur_param.end()
+ param_comment_end = next_param.start() if next_param else len(comment)
+ params[param_name] = (
+ add_paragraphs(comment[param_comment_start:param_comment_end].strip())
+ .replace('\n', ''))
+
+ return (parent_comment, params)
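+
+# Illustrative example (not part of the original file):
+#   ProcessComment('Does a thing.\n\n|x|: The first arg.\n|y|: The second arg.')
+# returns
+#   ('Does a thing.', {'x': 'The first arg.', 'y': 'The second arg.'})
+# where the parameter dict is an OrderedDict preserving declaration order.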
+
+
+class Callspec(object):
+ '''
+ Given a Callspec node representing an IDL function declaration, converts into
+ a tuple:
+ (name, list of function parameters, return type)
+ '''
+ def __init__(self, callspec_node, comment):
+ self.node = callspec_node
+ self.comment = comment
+
+ def process(self, callbacks):
+ parameters = []
+ return_type = None
+ if self.node.GetProperty('TYPEREF') not in ('void', None):
+ return_type = Typeref(self.node.GetProperty('TYPEREF'),
+ self.node.parent,
+ {'name': self.node.GetName()}).process(callbacks)
+ # The IDL parser doesn't allow specifying return types as optional.
+ # Instead we infer any object return values to be optional.
+ # TODO(asargent): fix the IDL parser to support optional return types.
+ if return_type.get('type') == 'object' or '$ref' in return_type:
+ return_type['optional'] = True
+ for node in self.node.GetChildren():
+ parameter = Param(node).process(callbacks)
+ if parameter['name'] in self.comment:
+ parameter['description'] = self.comment[parameter['name']]
+ parameters.append(parameter)
+ return (self.node.GetName(), parameters, return_type)
+
+
+class Param(object):
+ '''
+ Given a Param node representing a function parameter, converts into a Python
+ dictionary that the JSON schema compiler expects to see.
+ '''
+ def __init__(self, param_node):
+ self.node = param_node
+
+ def process(self, callbacks):
+ return Typeref(self.node.GetProperty('TYPEREF'),
+ self.node,
+ {'name': self.node.GetName()}).process(callbacks)
+
+
+class Dictionary(object):
+ '''
+ Given an IDL Dictionary node, converts into a Python dictionary that the JSON
+ schema compiler expects to see.
+ '''
+ def __init__(self, dictionary_node):
+ self.node = dictionary_node
+
+ def process(self, callbacks):
+ properties = OrderedDict()
+ for node in self.node.GetChildren():
+ if node.cls == 'Member':
+ k, v = Member(node).process(callbacks)
+ properties[k] = v
+ result = {'id': self.node.GetName(),
+ 'properties': properties,
+ 'type': 'object'}
+ if self.node.GetProperty('nodoc'):
+ result['nodoc'] = True
+ elif self.node.GetProperty('inline_doc'):
+ result['inline_doc'] = True
+ elif self.node.GetProperty('noinline_doc'):
+ result['noinline_doc'] = True
+ return result
+
+
+class Member(object):
+ '''
+ Given an IDL dictionary or interface member, converts into a name/value pair
+ where the value is a Python dictionary that the JSON schema compiler expects
+ to see.
+ '''
+ def __init__(self, member_node):
+ self.node = member_node
+
+ def process(self, callbacks):
+ properties = OrderedDict()
+ name = self.node.GetName()
+ if self.node.GetProperty('deprecated'):
+ properties['deprecated'] = self.node.GetProperty('deprecated')
+ if self.node.GetProperty('allowAmbiguousOptionalArguments'):
+ properties['allowAmbiguousOptionalArguments'] = True
+ for property_name in ('OPTIONAL', 'nodoc', 'nocompile', 'nodart'):
+ if self.node.GetProperty(property_name):
+ properties[property_name.lower()] = True
+ for option_name, sanitizer in [
+ ('maxListeners', int),
+ ('supportsFilters', lambda s: s == 'true'),
+ ('supportsListeners', lambda s: s == 'true'),
+ ('supportsRules', lambda s: s == 'true')]:
+ if self.node.GetProperty(option_name):
+ if 'options' not in properties:
+ properties['options'] = {}
+ properties['options'][option_name] = sanitizer(self.node.GetProperty(
+ option_name))
+ is_function = False
+ parameter_comments = OrderedDict()
+ for node in self.node.GetChildren():
+ if node.cls == 'Comment':
+ (parent_comment, parameter_comments) = ProcessComment(node.GetName())
+ properties['description'] = parent_comment
+ elif node.cls == 'Callspec':
+ is_function = True
+ name, parameters, return_type = (Callspec(node, parameter_comments)
+ .process(callbacks))
+ properties['parameters'] = parameters
+ if return_type is not None:
+ properties['returns'] = return_type
+ properties['name'] = name
+ if is_function:
+ properties['type'] = 'function'
+ else:
+ properties = Typeref(self.node.GetProperty('TYPEREF'),
+ self.node, properties).process(callbacks)
+ enum_values = self.node.GetProperty('legalValues')
+ if enum_values:
+ if properties['type'] == 'integer':
+ enum_values = map(int, enum_values)
+ elif properties['type'] == 'double':
+ enum_values = map(float, enum_values)
+ properties['enum'] = enum_values
+ return name, properties
+
+
+class Typeref(object):
+ '''
+ Given a TYPEREF property representing the type of dictionary member or
+ function parameter, converts into a Python dictionary that the JSON schema
+ compiler expects to see.
+ '''
+ def __init__(self, typeref, parent, additional_properties):
+ self.typeref = typeref
+ self.parent = parent
+ self.additional_properties = additional_properties
+
+ def process(self, callbacks):
+ properties = self.additional_properties
+ result = properties
+
+ if self.parent.GetPropertyLocal('OPTIONAL'):
+ properties['optional'] = True
+
+ # The IDL parser denotes array types by adding a child 'Array' node onto
+ # the Param node in the Callspec.
+ for sibling in self.parent.GetChildren():
+ if sibling.cls == 'Array' and sibling.GetName() == self.parent.GetName():
+ properties['type'] = 'array'
+ properties['items'] = OrderedDict()
+ properties = properties['items']
+ break
+
+ if self.typeref == 'DOMString':
+ properties['type'] = 'string'
+ elif self.typeref == 'boolean':
+ properties['type'] = 'boolean'
+ elif self.typeref == 'double':
+ properties['type'] = 'number'
+ elif self.typeref == 'long':
+ properties['type'] = 'integer'
+ elif self.typeref == 'any':
+ properties['type'] = 'any'
+ elif self.typeref == 'object':
+ properties['type'] = 'object'
+ if 'additionalProperties' not in properties:
+ properties['additionalProperties'] = OrderedDict()
+ properties['additionalProperties']['type'] = 'any'
+ instance_of = self.parent.GetProperty('instanceOf')
+ if instance_of:
+ properties['isInstanceOf'] = instance_of
+ elif self.typeref == 'ArrayBuffer':
+ properties['type'] = 'binary'
+ properties['isInstanceOf'] = 'ArrayBuffer'
+ elif self.typeref == 'FileEntry':
+ properties['type'] = 'object'
+ properties['isInstanceOf'] = 'FileEntry'
+ if 'additionalProperties' not in properties:
+ properties['additionalProperties'] = OrderedDict()
+ properties['additionalProperties']['type'] = 'any'
+ elif self.parent.GetPropertyLocal('Union'):
+ properties['choices'] = [Typeref(node.GetProperty('TYPEREF'),
+ node,
+ OrderedDict()).process(callbacks)
+ for node in self.parent.GetChildren()
+ if node.cls == 'Option']
+ elif self.typeref is None:
+ properties['type'] = 'function'
+ else:
+ if self.typeref in callbacks:
+ # Do not override name and description if they are already specified.
+ name = properties.get('name', None)
+ description = properties.get('description', None)
+ properties.update(callbacks[self.typeref])
+ if description is not None:
+ properties['description'] = description
+ if name is not None:
+ properties['name'] = name
+ else:
+ properties['$ref'] = self.typeref
+ return result
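+
+# Illustrative examples of the Typeref mapping above (not part of the original
+# file): a 'DOMString' parameter becomes {'type': 'string'}, 'long' becomes
+# {'type': 'integer'}, 'double' becomes {'type': 'number'}, and an unresolved
+# typeref such as 'MyType1' becomes {'$ref': 'MyType1'}. If the parent node
+# carries the OPTIONAL property, 'optional': True is added as well.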
+
+
+class Enum(object):
+ '''
+ Given an IDL Enum node, converts into a Python dictionary that the JSON
+ schema compiler expects to see.
+ '''
+ def __init__(self, enum_node):
+ self.node = enum_node
+ self.description = ''
+
+ def process(self, callbacks):
+ enum = []
+ for node in self.node.GetChildren():
+ if node.cls == 'EnumItem':
+ enum_value = {'name': node.GetName()}
+ for child in node.GetChildren():
+ if child.cls == 'Comment':
+ enum_value['description'] = ProcessComment(child.GetName())[0]
+ else:
+ raise ValueError('Did not process %s %s' % (child.cls, child))
+ enum.append(enum_value)
+ elif node.cls == 'Comment':
+ self.description = ProcessComment(node.GetName())[0]
+ else:
+ sys.exit('Did not process %s %s' % (node.cls, node))
+ result = {'id' : self.node.GetName(),
+ 'description': self.description,
+ 'type': 'string',
+ 'enum': enum}
+ for property_name in (
+ 'inline_doc', 'noinline_doc', 'nodoc', 'cpp_enum_prefix_override',):
+ if self.node.GetProperty(property_name):
+ result[property_name] = self.node.GetProperty(property_name)
+ if self.node.GetProperty('deprecated'):
+      result['deprecated'] = self.node.GetProperty('deprecated')
+ return result
+
+
+class Namespace(object):
+ '''
+ Given an IDLNode representing an IDL namespace, converts into a Python
+ dictionary that the JSON schema compiler expects to see.
+ '''
+
+ def __init__(self,
+ namespace_node,
+ description,
+ nodoc=False,
+ internal=False,
+ platforms=None,
+ compiler_options=None,
+ deprecated=None):
+ self.namespace = namespace_node
+ self.nodoc = nodoc
+ self.internal = internal
+ self.platforms = platforms
+ self.compiler_options = compiler_options
+ self.events = []
+ self.functions = []
+ self.types = []
+ self.callbacks = OrderedDict()
+ self.description = description
+ self.deprecated = deprecated
+
+ def process(self):
+ for node in self.namespace.GetChildren():
+ if node.cls == 'Dictionary':
+ self.types.append(Dictionary(node).process(self.callbacks))
+ elif node.cls == 'Callback':
+ k, v = Member(node).process(self.callbacks)
+ self.callbacks[k] = v
+ elif node.cls == 'Interface' and node.GetName() == 'Functions':
+ self.functions = self.process_interface(node)
+ elif node.cls == 'Interface' and node.GetName() == 'Events':
+ self.events = self.process_interface(node)
+ elif node.cls == 'Enum':
+ self.types.append(Enum(node).process(self.callbacks))
+ else:
+ sys.exit('Did not process %s %s' % (node.cls, node))
+ if self.compiler_options is not None:
+ compiler_options = self.compiler_options
+ else:
+ compiler_options = {}
+ return {'namespace': self.namespace.GetName(),
+ 'description': self.description,
+ 'nodoc': self.nodoc,
+ 'types': self.types,
+ 'functions': self.functions,
+ 'internal': self.internal,
+ 'events': self.events,
+ 'platforms': self.platforms,
+ 'compiler_options': compiler_options,
+ 'deprecated': self.deprecated}
+
+ def process_interface(self, node):
+ members = []
+ for member in node.GetChildren():
+ if member.cls == 'Member':
+ name, properties = Member(member).process(self.callbacks)
+ members.append(properties)
+ return members
+
+
+class IDLSchema(object):
+ '''
+ Given a list of IDLNodes and IDLAttributes, converts into a Python list
+ of api_defs that the JSON schema compiler expects to see.
+ '''
+
+ def __init__(self, idl):
+ self.idl = idl
+
+ def process(self):
+ namespaces = []
+ nodoc = False
+ internal = False
+ description = None
+ platforms = None
+ compiler_options = {}
+ deprecated = None
+ for node in self.idl:
+ if node.cls == 'Namespace':
+ if not description:
+ # TODO(kalman): Go back to throwing an error here.
+ print('%s must have a namespace-level comment. This will '
+ 'appear on the API summary page.' % node.GetName())
+ description = ''
+ namespace = Namespace(node, description, nodoc, internal,
+ platforms=platforms,
+ compiler_options=compiler_options or None,
+ deprecated=deprecated)
+ namespaces.append(namespace.process())
+ nodoc = False
+ internal = False
+ platforms = None
+ compiler_options = None
+ elif node.cls == 'Copyright':
+ continue
+ elif node.cls == 'Comment':
+ description = node.GetName()
+ elif node.cls == 'ExtAttribute':
+ if node.name == 'nodoc':
+ nodoc = bool(node.value)
+ elif node.name == 'internal':
+ internal = bool(node.value)
+ elif node.name == 'platforms':
+ platforms = list(node.value)
+ elif node.name == 'implemented_in':
+ compiler_options['implemented_in'] = node.value
+ elif node.name == 'camel_case_enum_to_string':
+ compiler_options['camel_case_enum_to_string'] = node.value
+ elif node.name == 'deprecated':
+ deprecated = str(node.value)
+ else:
+ continue
+ else:
+ sys.exit('Did not process %s %s' % (node.cls, node))
+ return namespaces
+
+
+def Load(filename):
+ '''
+ Given the filename of an IDL file, parses it and returns an equivalent
+ Python dictionary in a format that the JSON schema compiler expects to see.
+ '''
+
+ f = open(filename, 'r')
+ contents = f.read()
+ f.close()
+
+ idl = idl_parser.IDLParser().ParseData(contents, filename)
+ idl_schema = IDLSchema(idl)
+ return idl_schema.process()
+
+
+def Main():
+ '''
+ Dump a json serialization of parse result for the IDL files whose names
+ were passed in on the command line.
+ '''
+ if len(sys.argv) > 1:
+ for filename in sys.argv[1:]:
+ schema = Load(filename)
+      print(json.dumps(schema, indent=2))
+ else:
+ contents = sys.stdin.read()
+ idl = idl_parser.IDLParser().ParseData(contents, '<stdin>')
+ schema = IDLSchema(idl).process()
+    print(json.dumps(schema, indent=2))
+
+
+if __name__ == '__main__':
+ Main()
diff --git a/tools/json_schema_compiler/idl_schema_test.py b/tools/json_schema_compiler/idl_schema_test.py
new file mode 100755
index 0000000..a045716
--- /dev/null
+++ b/tools/json_schema_compiler/idl_schema_test.py
@@ -0,0 +1,396 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import idl_schema
+import unittest
+
+from json_parse import OrderedDict
+
+def getFunction(schema, name):
+ for item in schema['functions']:
+ if item['name'] == name:
+ return item
+ raise KeyError('Missing function %s' % name)
+
+
+def getParams(schema, name):
+ function = getFunction(schema, name)
+ return function['parameters']
+
+
+def getReturns(schema, name):
+ function = getFunction(schema, name)
+ return function['returns']
+
+
+def getType(schema, id):
+ for item in schema['types']:
+ if item['id'] == id:
+ return item
+
+
+class IdlSchemaTest(unittest.TestCase):
+ def setUp(self):
+ loaded = idl_schema.Load('test/idl_basics.idl')
+ self.assertEquals(1, len(loaded))
+ self.assertEquals('idl_basics', loaded[0]['namespace'])
+ self.idl_basics = loaded[0]
+ self.maxDiff = None
+
+ def testSimpleCallbacks(self):
+ schema = self.idl_basics
+ expected = [{'type': 'function', 'name': 'cb', 'parameters':[]}]
+ self.assertEquals(expected, getParams(schema, 'function4'))
+
+ expected = [{'type': 'function', 'name': 'cb',
+ 'parameters':[{'name': 'x', 'type': 'integer'}]}]
+ self.assertEquals(expected, getParams(schema, 'function5'))
+
+ expected = [{'type': 'function', 'name': 'cb',
+ 'parameters':[{'name': 'arg', '$ref': 'MyType1'}]}]
+ self.assertEquals(expected, getParams(schema, 'function6'))
+
+ def testCallbackWithArrayArgument(self):
+ schema = self.idl_basics
+ expected = [{'type': 'function', 'name': 'cb',
+ 'parameters':[{'name': 'arg', 'type': 'array',
+ 'items':{'$ref': 'MyType2'}}]}]
+ self.assertEquals(expected, getParams(schema, 'function12'))
+
+ def testArrayOfCallbacks(self):
+ schema = idl_schema.Load('test/idl_function_types.idl')[0]
+ expected = [{'type': 'array', 'name': 'callbacks',
+ 'items':{'type': 'function', 'name': 'MyCallback',
+ 'parameters':[{'type': 'integer', 'name': 'x'}]}}]
+ self.assertEquals(expected, getParams(schema, 'whatever'))
+
+ def testLegalValues(self):
+ self.assertEquals({
+ 'x': {'name': 'x', 'type': 'integer', 'enum': [1,2],
+ 'description': 'This comment tests "double-quotes".'},
+ 'y': {'name': 'y', 'type': 'string'},
+ 'z': {'name': 'z', 'type': 'string'},
+ 'a': {'name': 'a', 'type': 'string'},
+ 'b': {'name': 'b', 'type': 'string'},
+ 'c': {'name': 'c', 'type': 'string'}},
+ getType(self.idl_basics, 'MyType1')['properties'])
+
+ def testMemberOrdering(self):
+ self.assertEquals(
+ ['x', 'y', 'z', 'a', 'b', 'c'],
+ getType(self.idl_basics, 'MyType1')['properties'].keys())
+
+ def testEnum(self):
+ schema = self.idl_basics
+ expected = {'enum': [{'name': 'name1', 'description': 'comment1'},
+ {'name': 'name2'}],
+ 'description': 'Enum description',
+ 'type': 'string', 'id': 'EnumType'}
+ self.assertEquals(expected, getType(schema, expected['id']))
+
+ expected = [{'name': 'type', '$ref': 'EnumType'},
+ {'type': 'function', 'name': 'cb',
+ 'parameters':[{'name': 'type', '$ref': 'EnumType'}]}]
+ self.assertEquals(expected, getParams(schema, 'function13'))
+
+ expected = [{'items': {'$ref': 'EnumType'}, 'name': 'types',
+ 'type': 'array'}]
+ self.assertEquals(expected, getParams(schema, 'function14'))
+
+ def testScopedArguments(self):
+ schema = self.idl_basics
+ expected = [{'name': 'value', '$ref': 'idl_other_namespace.SomeType'}]
+ self.assertEquals(expected, getParams(schema, 'function20'))
+
+ expected = [{'items': {'$ref': 'idl_other_namespace.SomeType'},
+ 'name': 'values',
+ 'type': 'array'}]
+ self.assertEquals(expected, getParams(schema, 'function21'))
+
+ expected = [{'name': 'value',
+ '$ref': 'idl_other_namespace.sub_namespace.AnotherType'}]
+ self.assertEquals(expected, getParams(schema, 'function22'))
+
+ expected = [{'items': {'$ref': 'idl_other_namespace.sub_namespace.'
+ 'AnotherType'},
+ 'name': 'values',
+ 'type': 'array'}]
+ self.assertEquals(expected, getParams(schema, 'function23'))
+
+ def testNoCompile(self):
+ schema = self.idl_basics
+ func = getFunction(schema, 'function15')
+ self.assertTrue(func is not None)
+ self.assertTrue(func['nocompile'])
+
+ def testNoDocOnEnum(self):
+ schema = self.idl_basics
+ enum_with_nodoc = getType(schema, 'EnumTypeWithNoDoc')
+ self.assertTrue(enum_with_nodoc is not None)
+ self.assertTrue(enum_with_nodoc['nodoc'])
+
+ def testInternalNamespace(self):
+ idl_basics = self.idl_basics
+ self.assertEquals('idl_basics', idl_basics['namespace'])
+ self.assertTrue(idl_basics['internal'])
+ self.assertFalse(idl_basics['nodoc'])
+
+ def testReturnTypes(self):
+ schema = self.idl_basics
+ self.assertEquals({'name': 'function24', 'type': 'integer'},
+ getReturns(schema, 'function24'))
+ self.assertEquals({'name': 'function25', '$ref': 'MyType1',
+ 'optional': True},
+ getReturns(schema, 'function25'))
+ self.assertEquals({'name': 'function26', 'type': 'array',
+ 'items': {'$ref': 'MyType1'}},
+ getReturns(schema, 'function26'))
+ self.assertEquals({'name': 'function27', '$ref': 'EnumType',
+ 'optional': True},
+ getReturns(schema, 'function27'))
+ self.assertEquals({'name': 'function28', 'type': 'array',
+ 'items': {'$ref': 'EnumType'}},
+ getReturns(schema, 'function28'))
+ self.assertEquals({'name': 'function29', '$ref':
+ 'idl_other_namespace.SomeType',
+ 'optional': True},
+ getReturns(schema, 'function29'))
+ self.assertEquals({'name': 'function30', 'type': 'array',
+ 'items': {'$ref': 'idl_other_namespace.SomeType'}},
+ getReturns(schema, 'function30'))
+
+ def testChromeOSPlatformsNamespace(self):
+ schema = idl_schema.Load('test/idl_namespace_chromeos.idl')[0]
+ self.assertEquals('idl_namespace_chromeos', schema['namespace'])
+ expected = ['chromeos']
+ self.assertEquals(expected, schema['platforms'])
+
+ def testAllPlatformsNamespace(self):
+ schema = idl_schema.Load('test/idl_namespace_all_platforms.idl')[0]
+ self.assertEquals('idl_namespace_all_platforms', schema['namespace'])
+ expected = ['chromeos', 'chromeos_touch', 'linux', 'mac', 'win']
+ self.assertEquals(expected, schema['platforms'])
+
+ def testNonSpecificPlatformsNamespace(self):
+ schema = idl_schema.Load('test/idl_namespace_non_specific_platforms.idl')[0]
+ self.assertEquals('idl_namespace_non_specific_platforms',
+ schema['namespace'])
+ expected = None
+ self.assertEquals(expected, schema['platforms'])
+
+ def testSpecificImplementNamespace(self):
+ schema = idl_schema.Load('test/idl_namespace_specific_implement.idl')[0]
+ self.assertEquals('idl_namespace_specific_implement',
+ schema['namespace'])
+ expected = 'idl_namespace_specific_implement.idl'
+ self.assertEquals(expected, schema['compiler_options']['implemented_in'])
+
+ def testSpecificImplementOnChromeOSNamespace(self):
+ schema = idl_schema.Load(
+ 'test/idl_namespace_specific_implement_chromeos.idl')[0]
+ self.assertEquals('idl_namespace_specific_implement_chromeos',
+ schema['namespace'])
+ expected_implemented_path = 'idl_namespace_specific_implement_chromeos.idl'
+ expected_platform = ['chromeos']
+ self.assertEquals(expected_implemented_path,
+ schema['compiler_options']['implemented_in'])
+ self.assertEquals(expected_platform, schema['platforms'])
+
+ def testCallbackComment(self):
+ schema = self.idl_basics
+ self.assertEquals('A comment on a callback.',
+ getParams(schema, 'function16')[0]['description'])
+ self.assertEquals(
+ 'A parameter.',
+ getParams(schema, 'function16')[0]['parameters'][0]['description'])
+ self.assertEquals(
+ 'Just a parameter comment, with no comment on the callback.',
+ getParams(schema, 'function17')[0]['parameters'][0]['description'])
+ self.assertEquals(
+ 'Override callback comment.',
+ getParams(schema, 'function18')[0]['description'])
+
+ def testFunctionComment(self):
+ schema = self.idl_basics
+ func = getFunction(schema, 'function3')
+ self.assertEquals(('This comment should appear in the documentation, '
+ 'despite occupying multiple lines.'),
+ func['description'])
+ self.assertEquals(
+ [{'description': ('So should this comment about the argument. '
+ '<em>HTML</em> is fine too.'),
+ 'name': 'arg',
+ '$ref': 'MyType1'}],
+ func['parameters'])
+ func = getFunction(schema, 'function4')
+ self.assertEquals(
+ '<p>This tests if "double-quotes" are escaped correctly.</p>'
+ '<p>It also tests a comment with two newlines.</p>',
+ func['description'])
+
+ def testReservedWords(self):
+ schema = idl_schema.Load('test/idl_reserved_words.idl')[0]
+
+ foo_type = getType(schema, 'Foo')
+ self.assertEquals([{'name': 'float'}, {'name': 'DOMString'}],
+ foo_type['enum'])
+
+ enum_type = getType(schema, 'enum')
+ self.assertEquals([{'name': 'callback'}, {'name': 'namespace'}],
+ enum_type['enum'])
+
+ dictionary = getType(schema, 'dictionary')
+ self.assertEquals('integer', dictionary['properties']['long']['type'])
+
+ mytype = getType(schema, 'MyType')
+ self.assertEquals('string', mytype['properties']['interface']['type'])
+
+ params = getParams(schema, 'static')
+ self.assertEquals('Foo', params[0]['$ref'])
+ self.assertEquals('enum', params[1]['$ref'])
+
+ def testObjectTypes(self):
+ schema = idl_schema.Load('test/idl_object_types.idl')[0]
+
+ foo_type = getType(schema, 'FooType')
+ self.assertEquals('object', foo_type['type'])
+ self.assertEquals('integer', foo_type['properties']['x']['type'])
+ self.assertEquals('object', foo_type['properties']['y']['type'])
+ self.assertEquals(
+ 'any',
+ foo_type['properties']['y']['additionalProperties']['type'])
+ self.assertEquals('object', foo_type['properties']['z']['type'])
+ self.assertEquals(
+ 'any',
+ foo_type['properties']['z']['additionalProperties']['type'])
+ self.assertEquals('Window', foo_type['properties']['z']['isInstanceOf'])
+
+ bar_type = getType(schema, 'BarType')
+ self.assertEquals('object', bar_type['type'])
+ self.assertEquals('any', bar_type['properties']['x']['type'])
+
+ def testObjectTypesInFunctions(self):
+ schema = idl_schema.Load('test/idl_object_types.idl')[0]
+
+ params = getParams(schema, 'objectFunction1')
+ self.assertEquals('object', params[0]['type'])
+ self.assertEquals('any', params[0]['additionalProperties']['type'])
+ self.assertEquals('ImageData', params[0]['isInstanceOf'])
+
+ params = getParams(schema, 'objectFunction2')
+ self.assertEquals('any', params[0]['type'])
+
+ def testObjectTypesWithOptionalFields(self):
+ schema = idl_schema.Load('test/idl_object_types.idl')[0]
+
+ baz_type = getType(schema, 'BazType')
+ self.assertEquals(True, baz_type['properties']['x']['optional'])
+ self.assertEquals('integer', baz_type['properties']['x']['type'])
+ self.assertEquals(True, baz_type['properties']['foo']['optional'])
+ self.assertEquals('FooType', baz_type['properties']['foo']['$ref'])
+
+ def testObjectTypesWithUnions(self):
+ schema = idl_schema.Load('test/idl_object_types.idl')[0]
+
+ union_type = getType(schema, 'UnionType')
+ expected = {
+ 'type': 'object',
+ 'id': 'UnionType',
+ 'properties': {
+ 'x': {
+ 'name': 'x',
+ 'optional': True,
+ 'choices': [
+ {'type': 'integer'},
+ {'$ref': 'FooType'},
+ ]
+ },
+ 'y': {
+ 'name': 'y',
+ 'choices': [
+ {'type': 'string'},
+ {'type': 'object',
+ 'additionalProperties': {'type': 'any'}}
+ ]
+ },
+ 'z': {
+ 'name': 'z',
+ 'choices': [
+ {'type': 'object', 'isInstanceOf': 'ImageData',
+ 'additionalProperties': {'type': 'any'}},
+ {'type': 'integer'}
+ ]
+ }
+ },
+ }
+
+ self.assertEquals(expected, union_type)
+
+ def testUnionsWithModifiers(self):
+ schema = idl_schema.Load('test/idl_object_types.idl')[0]
+
+ union_type = getType(schema, 'ModifiedUnionType')
+ expected = {
+ 'type': 'object',
+ 'id': 'ModifiedUnionType',
+ 'properties': {
+ 'x': {
+ 'name': 'x',
+ 'nodoc': True,
+ 'choices': [
+ {'type': 'integer'},
+ {'type': 'string'}
+ ]
+ }
+ }
+ }
+
+ self.assertEquals(expected, union_type)
+
+ def testUnionsWithFunctions(self):
+ schema = idl_schema.Load('test/idl_function_types.idl')[0]
+
+ union_params = getParams(schema, 'union_params')
+ expected = [{
+ 'name': 'x',
+ 'choices': [
+ {'type': 'integer'},
+ {'type': 'string'}
+ ]
+ }]
+
+ self.assertEquals(expected, union_params)
+
+ def testUnionsWithCallbacks(self):
+ schema = idl_schema.Load('test/idl_function_types.idl')[0]
+
+ blah_params = getParams(schema, 'blah')
+ expected = [{
+ 'type': 'function', 'name': 'callback', 'parameters': [{
+ 'name': 'x',
+ 'choices': [
+ {'type': 'integer'},
+ {'type': 'string'}
+ ]}
+ ]
+ }]
+ self.assertEquals(expected, blah_params)
+
+ badabish_params = getParams(schema, 'badabish')
+ expected = [{
+ 'type': 'function', 'name': 'callback', 'parameters': [{
+ 'name': 'x', 'optional': True, 'choices': [
+ {'type': 'integer'},
+ {'type': 'string'}
+ ]
+ }]
+ }]
+
+ self.assertEquals(expected, badabish_params)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/json_schema_compiler/json_parse.py b/tools/json_schema_compiler/json_parse.py
new file mode 100644
index 0000000..21a5a8f
--- /dev/null
+++ b/tools/json_schema_compiler/json_parse.py
@@ -0,0 +1,61 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import sys
+
+_FILE_PATH = os.path.dirname(os.path.realpath(__file__))
+_SYS_PATH = sys.path[:]
+try:
+ _COMMENT_EATER_PATH = os.path.join(
+ _FILE_PATH, os.pardir, 'json_comment_eater')
+ sys.path.insert(0, _COMMENT_EATER_PATH)
+ import json_comment_eater
+finally:
+ sys.path = _SYS_PATH
+
+try:
+ from collections import OrderedDict
+
+ # Successfully imported, so we're running Python >= 2.7, and json.loads
+ # supports object_pairs_hook.
+ def Parse(json_str):
+ return json.loads(json_comment_eater.Nom(json_str),
+ object_pairs_hook=OrderedDict)
+
+except ImportError:
+ # Failed to import, so we're running Python < 2.7, and json.loads doesn't
+ # support object_pairs_hook. simplejson however does, but it's slow.
+ #
+ # TODO(cduvall/kalman): Refuse to start the docs server in this case, but
+ # let json-schema-compiler do its thing.
+ #logging.warning('Using simplejson to parse, this might be slow! Upgrade to '
+ # 'Python 2.7.')
+
+ _SYS_PATH = sys.path[:]
+ try:
+ _SIMPLE_JSON_PATH = os.path.join(_FILE_PATH,
+ os.pardir,
+ os.pardir,
+ 'third_party')
+ sys.path.insert(0, _SIMPLE_JSON_PATH)
+ # Add this path in case this is being used in the docs server.
+ sys.path.insert(0, os.path.join(_FILE_PATH,
+ os.pardir,
+ os.pardir,
+ 'third_party',
+ 'json_schema_compiler'))
+ import simplejson
+ from simplejson import OrderedDict
+ finally:
+ sys.path = _SYS_PATH
+
+ def Parse(json_str):
+ return simplejson.loads(json_comment_eater.Nom(json_str),
+ object_pairs_hook=OrderedDict)
+
+
+def IsDict(item):
+ return isinstance(item, (dict, OrderedDict))
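+
+# Illustrative example of Parse (not part of the original file):
+#   Parse('{"b": 1, "a": 2}  // a comment')
+# returns OrderedDict([('b', 1), ('a', 2)]): comments are stripped by
+# json_comment_eater.Nom and key order is preserved via object_pairs_hook.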
diff --git a/tools/json_schema_compiler/json_schema.py b/tools/json_schema_compiler/json_schema.py
new file mode 100644
index 0000000..bb4e9c4
--- /dev/null
+++ b/tools/json_schema_compiler/json_schema.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+
+import json_parse
+
+
+def DeleteNodes(item, delete_key=None, matcher=None):
+ """Deletes certain nodes in item, recursively. If |delete_key| is set, all
+  dicts with |delete_key| as a key are deleted. If a callback is passed
+ as |matcher|, |DeleteNodes| will delete all dicts for which matcher(dict)
+ returns True.
+ """
+ assert (delete_key is not None) != (matcher is not None)
+
+ def ShouldDelete(thing):
+ return json_parse.IsDict(thing) and (
+        (delete_key is not None and delete_key in thing) or
+        (matcher is not None and matcher(thing)))
+
+ if json_parse.IsDict(item):
+ toDelete = []
+ for key, value in item.items():
+ if ShouldDelete(value):
+ toDelete.append(key)
+ else:
+ DeleteNodes(value, delete_key, matcher)
+ for key in toDelete:
+ del item[key]
+ elif type(item) == list:
+ item[:] = [DeleteNodes(thing, delete_key, matcher)
+ for thing in item if not ShouldDelete(thing)]
+
+ return item
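+
+# Illustrative example (not part of the original file):
+#   DeleteNodes({'keep': 1, 'drop': {'nocompile': True}}, 'nocompile')
+# mutates and returns the dict as {'keep': 1}; the entry whose value contains
+# the 'nocompile' key is removed.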
+
+
+def Load(filename):
+ with open(filename, 'r') as handle:
+ schemas = json_parse.Parse(handle.read())
+ return schemas
+
+
+# A dictionary mapping |filename| to the object resulting from loading the JSON
+# at |filename|.
+_cache = {}
+
+
+def CachedLoad(filename):
+ """Equivalent to Load(filename), but caches results for subsequent calls"""
+ if filename not in _cache:
+ _cache[filename] = Load(filename)
+ # Return a copy of the object so that any changes a caller makes won't affect
+ # the next caller.
+ return copy.deepcopy(_cache[filename])
+
diff --git a/tools/json_schema_compiler/json_schema_test.py b/tools/json_schema_compiler/json_schema_test.py
new file mode 100755
index 0000000..edbb06e
--- /dev/null
+++ b/tools/json_schema_compiler/json_schema_test.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json_schema
+import unittest
+
+class JsonSchemaUnittest(unittest.TestCase):
+ def testNocompile(self):
+ compiled = [
+ {
+ "namespace": "compile",
+ "description": "The compile API.",
+ "functions": [],
+ "types": {}
+ },
+
+ {
+ "namespace": "functions",
+ "description": "The functions API.",
+ "functions": [
+ {
+ "id": "two"
+ },
+ {
+ "id": "four"
+ }
+ ],
+
+ "types": {
+ "one": { "key": "value" }
+ }
+ },
+
+ {
+ "namespace": "types",
+ "description": "The types API.",
+ "functions": [
+ { "id": "one" }
+ ],
+ "types": {
+ "two": {
+ "key": "value"
+ },
+ "four": {
+ "key": "value"
+ }
+ }
+ },
+
+ {
+ "namespace": "nested",
+ "description": "The nested API.",
+ "properties": {
+ "sync": {
+ "functions": [
+ {
+ "id": "two"
+ },
+ {
+ "id": "four"
+ }
+ ],
+ "types": {
+ "two": {
+ "key": "value"
+ },
+ "four": {
+ "key": "value"
+ }
+ }
+ }
+ }
+ }
+ ]
+
+ schema = json_schema.CachedLoad('test/json_schema_test.json')
+ self.assertEquals(compiled, json_schema.DeleteNodes(schema, 'nocompile'))
+
+ def should_delete(value):
+ return isinstance(value, dict) and not value.get('valid', True)
+ expected = [
+ {'one': {'test': 'test'}},
+ {'valid': True},
+ {}
+ ]
+ given = [
+ {'one': {'test': 'test'}, 'two': {'valid': False}},
+ {'valid': True},
+ {},
+ {'valid': False}
+ ]
+ self.assertEquals(
+ expected, json_schema.DeleteNodes(given, matcher=should_delete))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/json_schema_compiler/memoize.py b/tools/json_schema_compiler/memoize.py
new file mode 100644
index 0000000..228e7e3
--- /dev/null
+++ b/tools/json_schema_compiler/memoize.py
@@ -0,0 +1,14 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+def memoize(fn):
+ '''Decorates |fn| to memoize.
+ '''
+ memory = {}
+ def impl(*args, **optargs):
+ full_args = args + tuple(optargs.iteritems())
+ if full_args not in memory:
+ memory[full_args] = fn(*args, **optargs)
+ return memory[full_args]
+ return impl
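+
+# Illustrative usage (not part of the original file):
+#
+#   @memoize
+#   def UnixName(name):
+#     ...
+#
+# Repeated calls with the same (hashable) arguments return the cached result
+# instead of re-running |fn|.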
diff --git a/tools/json_schema_compiler/model.py b/tools/json_schema_compiler/model.py
new file mode 100644
index 0000000..16530e7
--- /dev/null
+++ b/tools/json_schema_compiler/model.py
@@ -0,0 +1,605 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+
+from json_parse import OrderedDict
+from memoize import memoize
+
+
+class ParseException(Exception):
+ """Thrown when data in the model is invalid.
+ """
+ def __init__(self, parent, message):
+ hierarchy = _GetModelHierarchy(parent)
+ hierarchy.append(message)
+ Exception.__init__(
+ self, 'Model parse exception at:\n' + '\n'.join(hierarchy))
+
+
+class Model(object):
+ """Model of all namespaces that comprise an API.
+
+ Properties:
+ - |namespaces| a map of a namespace name to its model.Namespace
+ """
+ def __init__(self):
+ self.namespaces = {}
+
+ def AddNamespace(self,
+ json,
+ source_file,
+ include_compiler_options=False,
+ environment=None):
+ """Add a namespace's json to the model and returns the namespace.
+ """
+ namespace = Namespace(json,
+ source_file,
+ include_compiler_options=include_compiler_options,
+ environment=environment)
+ self.namespaces[namespace.name] = namespace
+ return namespace
+
+
+def CreateFeature(name, model):
+ if isinstance(model, dict):
+ return SimpleFeature(name, model)
+ return ComplexFeature(name, [SimpleFeature(name, child) for child in model])
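+
+# Illustrative note (not part of the original file): a feature defined by a
+# single dict yields a SimpleFeature, while a feature defined by a list of
+# dicts yields a ComplexFeature wrapping one SimpleFeature per entry.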
+
+
+class ComplexFeature(object):
+ """A complex feature which may be made of several simple features.
+
+ Properties:
+ - |name| the name of the feature
+ - |unix_name| the unix_name of the feature
+ - |feature_list| a list of simple features which make up the feature
+ """
+ def __init__(self, feature_name, features):
+ self.name = feature_name
+ self.unix_name = UnixName(self.name)
+ self.feature_list = features
+
+class SimpleFeature(object):
+ """A simple feature, which can make up a complex feature, as specified in
+ files such as chrome/common/extensions/api/_permission_features.json.
+
+ Properties:
+ - |name| the name of the feature
+ - |unix_name| the unix_name of the feature
+ - |channel| the channel where the feature is released
+ - |extension_types| the types which can use the feature
+ - |whitelist| a list of extensions allowed to use the feature
+ """
+ def __init__(self, feature_name, feature_def):
+ self.name = feature_name
+ self.unix_name = UnixName(self.name)
+ self.channel = feature_def['channel']
+ self.extension_types = feature_def['extension_types']
+ self.whitelist = feature_def.get('whitelist')
+
+
+class Namespace(object):
+ """An API namespace.
+
+ Properties:
+ - |name| the name of the namespace
+ - |description| the description of the namespace
+ - |deprecated| a reason and possible alternative for a deprecated api
+ - |unix_name| the unix_name of the namespace
+ - |source_file| the file that contained the namespace definition
+ - |source_file_dir| the directory component of |source_file|
+ - |source_file_filename| the filename component of |source_file|
+ - |platforms| if not None, the list of platforms that the namespace is
+ available to
+ - |types| a map of type names to their model.Type
+ - |functions| a map of function names to their model.Function
+ - |events| a map of event names to their model.Function
+ - |properties| a map of property names to their model.Property
+  - |compiler_options| the compiler_options dict, which is non-empty only if
+    |include_compiler_options| is True
+ """
+ def __init__(self,
+ json,
+ source_file,
+ include_compiler_options=False,
+ environment=None):
+ self.name = json['namespace']
+ if 'description' not in json:
+ # TODO(kalman): Go back to throwing an error here.
+ print('%s must have a "description" field. This will appear '
+ 'on the API summary page.' % self.name)
+ json['description'] = ''
+ self.description = json['description']
+ self.deprecated = json.get('deprecated', None)
+ self.unix_name = UnixName(self.name)
+ self.source_file = source_file
+ self.source_file_dir, self.source_file_filename = os.path.split(source_file)
+ self.short_filename = os.path.basename(source_file).split('.')[0]
+ self.parent = None
+ self.platforms = _GetPlatforms(json)
+ toplevel_origin = Origin(from_client=True, from_json=True)
+ self.types = _GetTypes(self, json, self, toplevel_origin)
+ self.functions = _GetFunctions(self, json, self)
+ self.events = _GetEvents(self, json, self)
+ self.properties = _GetProperties(self, json, self, toplevel_origin)
+ if include_compiler_options:
+ self.compiler_options = json.get('compiler_options', {})
+ else:
+ self.compiler_options = {}
+ self.environment = environment
+ self.documentation_options = json.get('documentation_options', {})
+
+
+class Origin(object):
+ """Stores the possible origin of model object as a pair of bools. These are:
+
+ |from_client| indicating that instances can originate from users of
+ generated code (for example, function results), or
+ |from_json| indicating that instances can originate from the JSON (for
+ example, function parameters)
+
+ It is possible for model objects to originate from both the client and json,
+ for example Types defined in the top-level schema, in which case both
+ |from_client| and |from_json| would be True.
+ """
+ def __init__(self, from_client=False, from_json=False):
+ if not from_client and not from_json:
+ raise ValueError('One of from_client or from_json must be true')
+ self.from_client = from_client
+ self.from_json = from_json
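+
+# Illustrative examples (not part of the original file):
+#   Origin(from_json=True)    -- e.g. a function parameter parsed from JSON
+#   Origin(from_client=True)  -- e.g. a callback argument created by generated
+#                                code
+#   Origin(from_client=True, from_json=True)  -- e.g. a type in the top-level
+#                                                schema, used in both directions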
+
+
+class Type(object):
+ """A Type defined in the json.
+
+ Properties:
+ - |name| the type name
+ - |namespace| the Type's namespace
+ - |description| the description of the type (if provided)
+ - |properties| a map of property unix_names to their model.Property
+ - |functions| a map of function names to their model.Function
+ - |events| a map of event names to their model.Event
+ - |origin| the Origin of the type
+ - |property_type| the PropertyType of this Type
+ - |item_type| if this is an array, the type of items in the array
+ - |simple_name| the name of this Type without a namespace
+ - |additional_properties| the type of the additional properties, if any is
+ specified
+ """
+ def __init__(self,
+ parent,
+ name,
+ json,
+ namespace,
+ origin):
+ self.name = name
+ self.namespace = namespace
+ self.simple_name = _StripNamespace(self.name, namespace)
+ self.unix_name = UnixName(self.name)
+ self.description = json.get('description', None)
+ self.origin = origin
+ self.parent = parent
+ self.instance_of = json.get('isInstanceOf', None)
+
+ # TODO(kalman): Only objects need functions/events/properties, but callers
+ # assume that all types have them. Fix this.
+ self.functions = _GetFunctions(self, json, namespace)
+ self.events = _GetEvents(self, json, namespace)
+ self.properties = _GetProperties(self, json, namespace, origin)
+
+ json_type = json.get('type', None)
+ if json_type == 'array':
+ self.property_type = PropertyType.ARRAY
+ self.item_type = Type(
+ self, '%sType' % name, json['items'], namespace, origin)
+ elif '$ref' in json:
+ self.property_type = PropertyType.REF
+ self.ref_type = json['$ref']
+ elif 'enum' in json and json_type == 'string':
+ self.property_type = PropertyType.ENUM
+ self.enum_values = [EnumValue(value) for value in json['enum']]
+ self.cpp_enum_prefix_override = json.get('cpp_enum_prefix_override', None)
+ elif json_type == 'any':
+ self.property_type = PropertyType.ANY
+ elif json_type == 'binary':
+ self.property_type = PropertyType.BINARY
+ elif json_type == 'boolean':
+ self.property_type = PropertyType.BOOLEAN
+ elif json_type == 'integer':
+ self.property_type = PropertyType.INTEGER
+ elif (json_type == 'double' or
+ json_type == 'number'):
+ self.property_type = PropertyType.DOUBLE
+ elif json_type == 'string':
+ self.property_type = PropertyType.STRING
+ elif 'choices' in json:
+ self.property_type = PropertyType.CHOICES
+ def generate_type_name(type_json):
+ if 'items' in type_json:
+ return '%ss' % generate_type_name(type_json['items'])
+ if '$ref' in type_json:
+ return type_json['$ref']
+ if 'type' in type_json:
+ return type_json['type']
+ return None
+ self.choices = [
+ Type(self,
+ generate_type_name(choice) or 'choice%s' % i,
+ choice,
+ namespace,
+ origin)
+ for i, choice in enumerate(json['choices'])]
+ elif json_type == 'object':
+ if not (
+ 'isInstanceOf' in json or
+ 'properties' in json or
+ 'additionalProperties' in json or
+ 'functions' in json or
+ 'events' in json):
+ raise ParseException(self, name + " has no properties or functions")
+ self.property_type = PropertyType.OBJECT
+ additional_properties_json = json.get('additionalProperties', None)
+ if additional_properties_json is not None:
+ self.additional_properties = Type(self,
+ 'additionalProperties',
+ additional_properties_json,
+ namespace,
+ origin)
+ else:
+ self.additional_properties = None
+ elif json_type == 'function':
+ self.property_type = PropertyType.FUNCTION
+ # Sometimes we might have an unnamed function, e.g. if it's a property
+ # of an object. Use the name of the property in that case.
+ function_name = json.get('name', name)
+ self.function = Function(self, function_name, json, namespace, origin)
+ else:
+ raise ParseException(self, 'Unsupported JSON type %s' % json_type)
+
+
+class Function(object):
+ """A Function defined in the API.
+
+ Properties:
+ - |name| the function name
+ - |platforms| if not None, the list of platforms that the function is
+ available to
+ - |params| a list of parameters to the function (order matters). A separate
+ parameter is used for each choice of a 'choices' parameter
+ - |deprecated| a reason and possible alternative for a deprecated function
+ - |description| a description of the function (if provided)
+ - |callback| the callback parameter to the function. There should be exactly
+ one
+  - |optional| whether the Function is "optional"; this is only meaningful
+    when the Function represents a callback property
+ - |simple_name| the name of this Function without a namespace
+ - |returns| the return type of the function; None if the function does not
+ return a value
+ """
+ def __init__(self,
+ parent,
+ name,
+ json,
+ namespace,
+ origin):
+ self.name = name
+ self.simple_name = _StripNamespace(self.name, namespace)
+ self.platforms = _GetPlatforms(json)
+ self.params = []
+ self.description = json.get('description')
+ self.deprecated = json.get('deprecated')
+ self.callback = None
+ self.optional = json.get('optional', False)
+ self.parent = parent
+ self.nocompile = json.get('nocompile')
+ options = json.get('options', {})
+ self.conditions = options.get('conditions', [])
+ self.actions = options.get('actions', [])
+ self.supports_listeners = options.get('supportsListeners', True)
+ self.supports_rules = options.get('supportsRules', False)
+ self.supports_dom = options.get('supportsDom', False)
+
+ def GeneratePropertyFromParam(p):
+ return Property(self, p['name'], p, namespace, origin)
+
+ self.filters = [GeneratePropertyFromParam(filter)
+ for filter in json.get('filters', [])]
+ callback_param = None
+ for param in json.get('parameters', []):
+ if param.get('type') == 'function':
+ if callback_param:
+ # No ParseException because the webstore has this.
+ # Instead, pretend all intermediate callbacks are properties.
+ self.params.append(GeneratePropertyFromParam(callback_param))
+ callback_param = param
+ else:
+ self.params.append(GeneratePropertyFromParam(param))
+
+ if callback_param:
+ self.callback = Function(self,
+ callback_param['name'],
+ callback_param,
+ namespace,
+ Origin(from_client=True))
+
+ self.returns = None
+ if 'returns' in json:
+ self.returns = Type(self,
+ '%sReturnType' % name,
+ json['returns'],
+ namespace,
+ origin)
+
+
+class Property(object):
+ """A property of a type OR a parameter to a function.
+ Properties:
+ - |name| name of the property as in the json. This shouldn't change since
+ it is the key used to access DictionaryValues
+ - |unix_name| the unix_style_name of the property. Used as variable name
+ - |optional| a boolean representing whether the property is optional
+ - |description| a description of the property (if provided)
+ - |type_| the model.Type of this property
+ - |simple_name| the name of this Property without a namespace
+ - |deprecated| a reason and possible alternative for a deprecated property
+ """
+ def __init__(self, parent, name, json, namespace, origin):
+ """Creates a Property from JSON.
+ """
+ self.parent = parent
+ self.name = name
+ self._unix_name = UnixName(self.name)
+ self._unix_name_used = False
+ self.origin = origin
+ self.simple_name = _StripNamespace(self.name, namespace)
+ self.description = json.get('description', None)
+ self.optional = json.get('optional', None)
+ self.instance_of = json.get('isInstanceOf', None)
+ self.deprecated = json.get('deprecated')
+
+ # HACK: only support very specific value types.
+ is_allowed_value = (
+ '$ref' not in json and
+ ('type' not in json or json['type'] == 'integer'
+ or json['type'] == 'string'))
+
+ self.value = None
+ if 'value' in json and is_allowed_value:
+ self.value = json['value']
+ if 'type' not in json:
+ # Sometimes the type of the value is left out, and we need to figure
+ # it out for ourselves.
+ if isinstance(self.value, int):
+ json['type'] = 'integer'
+ elif isinstance(self.value, basestring):
+ json['type'] = 'string'
+ else:
+ # TODO(kalman): support more types as necessary.
+ raise ParseException(
+ parent,
+ '"%s" is not a supported type for "value"' % type(self.value))
+
+ self.type_ = Type(parent, name, json, namespace, origin)
+
+ def GetUnixName(self):
+ """Gets the property's unix_name. Raises AttributeError if not set.
+ """
+ if not self._unix_name:
+ raise AttributeError('No unix_name set on %s' % self.name)
+ self._unix_name_used = True
+ return self._unix_name
+
+ def SetUnixName(self, unix_name):
+ """Set the property's unix_name. Raises AttributeError if the unix_name has
+ already been used (GetUnixName has been called).
+ """
+ if unix_name == self._unix_name:
+ return
+ if self._unix_name_used:
+ raise AttributeError(
+ 'Cannot set the unix_name on %s; '
+ 'it is already used elsewhere as %s' %
+ (self.name, self._unix_name))
+ self._unix_name = unix_name
+
+ unix_name = property(GetUnixName, SetUnixName)
+
+class EnumValue(object):
+ """A single value from an enum.
+ Properties:
+ - |name| name of the property as in the json.
+ - |description| a description of the property (if provided)
+ """
+ def __init__(self, json):
+ if isinstance(json, dict):
+ self.name = json['name']
+ self.description = json.get('description')
+ else:
+ self.name = json
+ self.description = None
+
+ def CamelName(self):
+ return CamelName(self.name)
+
+class _Enum(object):
+ """Superclass for enum types with a "name" field, setting up repr/eq/ne.
+ Enums need to do this so that equality/non-equality work over pickling.
+ """
+ @staticmethod
+ def GetAll(cls):
+ """Yields all _Enum objects declared in |cls|.
+ """
+ for prop_key in dir(cls):
+ prop_value = getattr(cls, prop_key)
+ if isinstance(prop_value, _Enum):
+ yield prop_value
+
+ def __init__(self, name):
+ self.name = name
+
+ def __eq__(self, other):
+ return type(other) == type(self) and other.name == self.name
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __repr__(self):
+ return self.name
+
+ def __str__(self):
+ return repr(self)
+
+
+class _PropertyTypeInfo(_Enum):
+ def __init__(self, is_fundamental, name):
+ _Enum.__init__(self, name)
+ self.is_fundamental = is_fundamental
+
+
+class PropertyType(object):
+ """Enum of different types of properties/parameters.
+ """
+ ANY = _PropertyTypeInfo(False, "any")
+ ARRAY = _PropertyTypeInfo(False, "array")
+ BINARY = _PropertyTypeInfo(False, "binary")
+ BOOLEAN = _PropertyTypeInfo(True, "boolean")
+ CHOICES = _PropertyTypeInfo(False, "choices")
+ DOUBLE = _PropertyTypeInfo(True, "double")
+ ENUM = _PropertyTypeInfo(False, "enum")
+ FUNCTION = _PropertyTypeInfo(False, "function")
+ INT64 = _PropertyTypeInfo(True, "int64")
+ INTEGER = _PropertyTypeInfo(True, "integer")
+ OBJECT = _PropertyTypeInfo(False, "object")
+ REF = _PropertyTypeInfo(False, "ref")
+ STRING = _PropertyTypeInfo(True, "string")
+
+
+@memoize
+def UnixName(name):
+ '''Returns the unix_style name for a given lowerCamelCase string.
+ '''
+ unix_name = []
+ for i, c in enumerate(name):
+ if c.isupper() and i > 0 and name[i - 1] != '_':
+ # Replace lowerUpper with lower_Upper.
+ if name[i - 1].islower():
+ unix_name.append('_')
+ # Replace ACMEWidgets with ACME_Widgets
+ elif i + 1 < len(name) and name[i + 1].islower():
+ unix_name.append('_')
+ if c == '.':
+ # Replace hello.world with hello_world.
+ unix_name.append('_')
+ else:
+ # Everything is lowercase.
+ unix_name.append(c.lower())
+ return ''.join(unix_name)
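+
+# Illustrative examples (not part of the original file):
+#   UnixName('lowerCamelCase')  -> 'lower_camel_case'
+#   UnixName('ACMEWidgets')     -> 'acme_widgets'
+#   UnixName('hello.world')     -> 'hello_world'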
+
+
+@memoize
+def CamelName(snake):
+ ''' Converts a snake_cased_string to a camelCasedOne. '''
+ pieces = snake.split('_')
+ camel = []
+ for i, piece in enumerate(pieces):
+ if i == 0:
+ camel.append(piece)
+ else:
+ camel.append(piece.capitalize())
+ return ''.join(camel)
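+
+# Illustrative conversions for CamelName (a usage sketch; the expected values
+# below are taken from testCamelName in model_test.py later in this patch):
+#   CamelName('foo_bar_baz') -> 'fooBarBaz'
+#   CamelName('FOO_BAR')     -> 'FOOBar'
+#   CamelName('_bar')        -> 'Bar'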
+
+
+def _StripNamespace(name, namespace):
+ if name.startswith(namespace.name + '.'):
+ return name[len(namespace.name + '.'):]
+ return name
+
+
+def _GetModelHierarchy(entity):
+ """Returns the hierarchy of the given model entity."""
+ hierarchy = []
+ while entity is not None:
+ hierarchy.append(getattr(entity, 'name', repr(entity)))
+ if isinstance(entity, Namespace):
+ hierarchy.insert(0, ' in %s' % entity.source_file)
+ entity = getattr(entity, 'parent', None)
+ hierarchy.reverse()
+ return hierarchy
+
+
+def _GetTypes(parent, json, namespace, origin):
+ """Creates Type objects extracted from |json|.
+ """
+ types = OrderedDict()
+ for type_json in json.get('types', []):
+ type_ = Type(parent, type_json['id'], type_json, namespace, origin)
+ types[type_.name] = type_
+ return types
+
+
+def _GetFunctions(parent, json, namespace):
+ """Creates Function objects extracted from |json|.
+ """
+ functions = OrderedDict()
+ for function_json in json.get('functions', []):
+ function = Function(parent,
+ function_json['name'],
+ function_json,
+ namespace,
+ Origin(from_json=True))
+ functions[function.name] = function
+ return functions
+
+
+def _GetEvents(parent, json, namespace):
+ """Creates Function objects generated from the events in |json|.
+ """
+ events = OrderedDict()
+ for event_json in json.get('events', []):
+ event = Function(parent,
+ event_json['name'],
+ event_json,
+ namespace,
+ Origin(from_client=True))
+ events[event.name] = event
+ return events
+
+
+def _GetProperties(parent, json, namespace, origin):
+ """Generates Property objects extracted from |json|.
+ """
+ properties = OrderedDict()
+ for name, property_json in json.get('properties', {}).items():
+ properties[name] = Property(parent, name, property_json, namespace, origin)
+ return properties
+
+
+class _PlatformInfo(_Enum):
+ def __init__(self, name):
+ _Enum.__init__(self, name)
+
+
+class Platforms(object):
+ """Enum of the possible platforms.
+ """
+ CHROMEOS = _PlatformInfo("chromeos")
+ CHROMEOS_TOUCH = _PlatformInfo("chromeos_touch")
+ LINUX = _PlatformInfo("linux")
+ MAC = _PlatformInfo("mac")
+ WIN = _PlatformInfo("win")
+
+
+def _GetPlatforms(json):
+  if 'platforms' not in json or json['platforms'] is None:
+ return None
+ # Sanity check: platforms should not be an empty list.
+ if not json['platforms']:
+ raise ValueError('"platforms" cannot be an empty list')
+ platforms = []
+ for platform_name in json['platforms']:
+ for platform_enum in _Enum.GetAll(Platforms):
+ if platform_name == platform_enum.name:
+ platforms.append(platform_enum)
+ break
+ return platforms
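+
+# Illustrative behaviour of _GetPlatforms, derived from the code above
+# (hypothetical inputs, not part of the original schema files):
+#   _GetPlatforms({'platforms': ['chromeos', 'win']})
+#       -> [Platforms.CHROMEOS, Platforms.WIN]
+#   _GetPlatforms({})                 -> None (same for 'platforms': None)
+#   _GetPlatforms({'platforms': []})  -> raises ValueError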
diff --git a/tools/json_schema_compiler/model_test.py b/tools/json_schema_compiler/model_test.py
new file mode 100755
index 0000000..75ed9c5
--- /dev/null
+++ b/tools/json_schema_compiler/model_test.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from json_schema import CachedLoad
+from idl_schema import Load
+from model import Platforms
+import model
+import unittest
+
+class ModelTest(unittest.TestCase):
+ def setUp(self):
+ self.model = model.Model()
+ self.permissions_json = CachedLoad('test/permissions.json')
+ self.model.AddNamespace(self.permissions_json[0],
+ 'path/to/permissions.json')
+ self.permissions = self.model.namespaces.get('permissions')
+ self.windows_json = CachedLoad('test/windows.json')
+ self.model.AddNamespace(self.windows_json[0],
+ 'path/to/window.json')
+ self.windows = self.model.namespaces.get('windows')
+ self.tabs_json = CachedLoad('test/tabs.json')
+ self.model.AddNamespace(self.tabs_json[0],
+ 'path/to/tabs.json')
+ self.tabs = self.model.namespaces.get('tabs')
+ self.idl_chromeos = Load('test/idl_namespace_chromeos.idl')
+ self.model.AddNamespace(self.idl_chromeos[0],
+ 'path/to/idl_namespace_chromeos.idl')
+ self.idl_namespace_chromeos = self.model.namespaces.get(
+ 'idl_namespace_chromeos')
+ self.idl_all_platforms = Load('test/idl_namespace_all_platforms.idl')
+ self.model.AddNamespace(self.idl_all_platforms[0],
+ 'path/to/idl_namespace_all_platforms.idl')
+ self.idl_namespace_all_platforms = self.model.namespaces.get(
+ 'idl_namespace_all_platforms')
+ self.idl_non_specific_platforms = Load(
+ 'test/idl_namespace_non_specific_platforms.idl')
+ self.model.AddNamespace(self.idl_non_specific_platforms[0],
+ 'path/to/idl_namespace_non_specific_platforms.idl')
+ self.idl_namespace_non_specific_platforms = self.model.namespaces.get(
+ 'idl_namespace_non_specific_platforms')
+
+ def testNamespaces(self):
+ self.assertEquals(6, len(self.model.namespaces))
+ self.assertTrue(self.permissions)
+
+ def testHasFunctions(self):
+ self.assertEquals(["contains", "getAll", "remove", "request"],
+ sorted(self.permissions.functions.keys()))
+
+ def testHasTypes(self):
+ self.assertEquals(['Tab'], self.tabs.types.keys())
+ self.assertEquals(['Permissions'], self.permissions.types.keys())
+ self.assertEquals(['Window'], self.windows.types.keys())
+
+ def testHasProperties(self):
+ self.assertEquals(["active", "favIconUrl", "highlighted", "id",
+ "incognito", "index", "pinned", "selected", "status", "title", "url",
+ "windowId"],
+ sorted(self.tabs.types['Tab'].properties.keys()))
+
+ def testProperties(self):
+ string_prop = self.tabs.types['Tab'].properties['status']
+ self.assertEquals(model.PropertyType.STRING,
+ string_prop.type_.property_type)
+ integer_prop = self.tabs.types['Tab'].properties['id']
+ self.assertEquals(model.PropertyType.INTEGER,
+ integer_prop.type_.property_type)
+ array_prop = self.windows.types['Window'].properties['tabs']
+ self.assertEquals(model.PropertyType.ARRAY,
+ array_prop.type_.property_type)
+ self.assertEquals(model.PropertyType.REF,
+ array_prop.type_.item_type.property_type)
+ self.assertEquals('tabs.Tab', array_prop.type_.item_type.ref_type)
+ object_prop = self.tabs.functions['query'].params[0]
+ self.assertEquals(model.PropertyType.OBJECT,
+ object_prop.type_.property_type)
+ self.assertEquals(
+ ["active", "highlighted", "pinned", "status", "title", "url",
+ "windowId", "windowType"],
+ sorted(object_prop.type_.properties.keys()))
+
+ def testChoices(self):
+ self.assertEquals(model.PropertyType.CHOICES,
+ self.tabs.functions['move'].params[0].type_.property_type)
+
+ def testPropertyNotImplemented(self):
+ (self.permissions_json[0]['types'][0]
+ ['properties']['permissions']['type']) = 'something'
+ self.assertRaises(model.ParseException, self.model.AddNamespace,
+ self.permissions_json[0], 'path/to/something.json')
+
+ def testDescription(self):
+ self.assertFalse(
+ self.permissions.functions['contains'].params[0].description)
+ self.assertEquals('True if the extension has the specified permissions.',
+ self.permissions.functions['contains'].callback.params[0].description)
+
+ def testPropertyUnixName(self):
+ param = self.tabs.functions['move'].params[0]
+ self.assertEquals('tab_ids', param.unix_name)
+
+ def testUnixName(self):
+ expectations = {
+ 'foo': 'foo',
+ 'fooBar': 'foo_bar',
+ 'fooBarBaz': 'foo_bar_baz',
+ 'fooBARBaz': 'foo_bar_baz',
+ 'fooBAR': 'foo_bar',
+ 'FOO': 'foo',
+ 'FOOBar': 'foo_bar',
+ 'foo.bar': 'foo_bar',
+ 'foo.BAR': 'foo_bar',
+ 'foo.barBAZ': 'foo_bar_baz',
+ 'foo_Bar_Baz_box': 'foo_bar_baz_box',
+ }
+ for name in expectations:
+ self.assertEquals(expectations[name], model.UnixName(name))
+
+ def testCamelName(self):
+ expectations = {
+ 'foo': 'foo',
+ 'fooBar': 'fooBar',
+ 'foo_bar_baz': 'fooBarBaz',
+ 'FOO_BAR': 'FOOBar',
+ 'FOO_bar': 'FOOBar',
+ '_bar': 'Bar',
+ '_bar_baz': 'BarBaz',
+ 'bar_': 'bar',
+ 'bar_baz_': 'barBaz',
+ }
+ for testcase, expected in expectations.iteritems():
+ self.assertEquals(expected, model.CamelName(testcase))
+
+ def testPlatforms(self):
+ self.assertEqual([Platforms.CHROMEOS],
+ self.idl_namespace_chromeos.platforms)
+ self.assertEqual(
+ [Platforms.CHROMEOS, Platforms.CHROMEOS_TOUCH, Platforms.LINUX,
+ Platforms.MAC, Platforms.WIN],
+ self.idl_namespace_all_platforms.platforms)
+ self.assertEqual(None,
+ self.idl_namespace_non_specific_platforms.platforms)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/json_schema_compiler/preview.py b/tools/json_schema_compiler/preview.py
new file mode 100755
index 0000000..050af96
--- /dev/null
+++ b/tools/json_schema_compiler/preview.py
@@ -0,0 +1,364 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Server for viewing the compiled C++ code from tools/json_schema_compiler.
+"""
+
+import cc_generator
+import code
+import cpp_type_generator
+import cpp_util
+import h_generator
+import idl_schema
+import json_schema
+import model
+import optparse
+import os
+import shlex
+import urlparse
+from highlighters import (
+ pygments_highlighter, none_highlighter, hilite_me_highlighter)
+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from cpp_namespace_environment import CppNamespaceEnvironment
+from schema_loader import SchemaLoader
+
+
+class CompilerHandler(BaseHTTPRequestHandler):
+ """A HTTPRequestHandler that outputs the result of tools/json_schema_compiler.
+ """
+ def do_GET(self):
+ parsed_url = urlparse.urlparse(self.path)
+ request_path = self._GetRequestPath(parsed_url)
+
+ chromium_favicon = 'http://codereview.chromium.org/static/favicon.ico'
+
+ head = code.Code()
+ head.Append('<link rel="icon" href="%s">' % chromium_favicon)
+ head.Append('<link rel="shortcut icon" href="%s">' % chromium_favicon)
+
+ body = code.Code()
+
+ try:
+ if os.path.isdir(request_path):
+ self._ShowPanels(parsed_url, head, body)
+ else:
+ self._ShowCompiledFile(parsed_url, head, body)
+ finally:
+ self.wfile.write('<html><head>')
+ self.wfile.write(head.Render())
+ self.wfile.write('</head><body>')
+ self.wfile.write(body.Render())
+ self.wfile.write('</body></html>')
+
+ def _GetRequestPath(self, parsed_url, strip_nav=False):
+ """Get the relative path from the current directory to the requested file.
+ """
+ path = parsed_url.path
+ if strip_nav:
+ path = parsed_url.path.replace('/nav', '')
+ return os.path.normpath(os.curdir + path)
+
+ def _ShowPanels(self, parsed_url, head, body):
+ """Show the previewer frame structure.
+
+ Code panes are populated via XHR after links in the nav pane are clicked.
+ """
+ (head.Append('<style>')
+ .Append('body {')
+ .Append(' margin: 0;')
+ .Append('}')
+ .Append('.pane {')
+ .Append(' height: 100%;')
+ .Append(' overflow-x: auto;')
+ .Append(' overflow-y: scroll;')
+ .Append(' display: inline-block;')
+ .Append('}')
+ .Append('#nav_pane {')
+ .Append(' width: 20%;')
+ .Append('}')
+ .Append('#nav_pane ul {')
+ .Append(' list-style-type: none;')
+ .Append(' padding: 0 0 0 1em;')
+ .Append('}')
+ .Append('#cc_pane {')
+ .Append(' width: 40%;')
+ .Append('}')
+ .Append('#h_pane {')
+ .Append(' width: 40%;')
+ .Append('}')
+ .Append('</style>')
+ )
+
+ body.Append(
+ '<div class="pane" id="nav_pane">%s</div>'
+ '<div class="pane" id="h_pane"></div>'
+ '<div class="pane" id="cc_pane"></div>' %
+ self._RenderNavPane(parsed_url.path[1:])
+ )
+
+ # The Javascript that interacts with the nav pane and panes to show the
+ # compiled files as the URL or highlighting options change.
+ body.Append('''<script type="text/javascript">
+// Calls a function for each highlighter style <select> element.
+function forEachHighlighterStyle(callback) {
+ var highlighterStyles =
+ document.getElementsByClassName('highlighter_styles');
+ for (var i = 0; i < highlighterStyles.length; ++i)
+ callback(highlighterStyles[i]);
+}
+
+// Called when anything changes, such as the highlighter or the URL hash.
+function updateEverything() {
+ var highlighters = document.getElementById('highlighters');
+ var highlighterName = highlighters.value;
+
+ // Cache in localStorage for when the page loads next.
+ localStorage.highlightersValue = highlighterName;
+
+ // Show/hide the highlighter styles.
+ var highlighterStyleName = '';
+ forEachHighlighterStyle(function(highlighterStyle) {
+ if (highlighterStyle.id === highlighterName + '_styles') {
+ highlighterStyle.removeAttribute('style')
+ highlighterStyleName = highlighterStyle.value;
+ } else {
+ highlighterStyle.setAttribute('style', 'display:none')
+ }
+
+ // Cache in localStorage for when the page next loads.
+ localStorage[highlighterStyle.id + 'Value'] = highlighterStyle.value;
+ });
+
+ // Populate the code panes.
+ function populateViaXHR(elementId, requestPath) {
+ var xhr = new XMLHttpRequest();
+ xhr.onreadystatechange = function() {
+ if (xhr.readyState != 4)
+ return;
+ if (xhr.status != 200) {
+ alert('XHR error to ' + requestPath);
+ return;
+ }
+ document.getElementById(elementId).innerHTML = xhr.responseText;
+ };
+ xhr.open('GET', requestPath, true);
+ xhr.send();
+ }
+
+ var targetName = window.location.hash;
+ targetName = targetName.substring('#'.length);
+ targetName = targetName.split('.', 1)[0]
+
+ if (targetName !== '') {
+ var basePath = window.location.pathname;
+ var query = 'highlighter=' + highlighterName + '&' +
+ 'style=' + highlighterStyleName;
+ populateViaXHR('h_pane', basePath + '/' + targetName + '.h?' + query);
+ populateViaXHR('cc_pane', basePath + '/' + targetName + '.cc?' + query);
+ }
+}
+
+// Initial load: set the values of highlighter and highlighterStyles from
+// localStorage.
+(function() {
+var cachedValue = localStorage.highlightersValue;
+if (cachedValue)
+ document.getElementById('highlighters').value = cachedValue;
+
+forEachHighlighterStyle(function(highlighterStyle) {
+ var cachedValue = localStorage[highlighterStyle.id + 'Value'];
+ if (cachedValue)
+ highlighterStyle.value = cachedValue;
+});
+})();
+
+window.addEventListener('hashchange', updateEverything, false);
+updateEverything();
+</script>''')
+
+ def _ShowCompiledFile(self, parsed_url, head, body):
+ """Show the compiled version of a json or idl file given the path to the
+ compiled file.
+ """
+ api_model = model.Model()
+
+ request_path = self._GetRequestPath(parsed_url)
+ (file_root, file_ext) = os.path.splitext(request_path)
+ (filedir, filename) = os.path.split(file_root)
+
+ schema_loader = SchemaLoader("./",
+ filedir,
+ self.server.include_rules,
+ self.server.cpp_namespace_pattern)
+ try:
+ # Get main file.
+ namespace = schema_loader.ResolveNamespace(filename)
+ type_generator = cpp_type_generator.CppTypeGenerator(
+ api_model,
+ schema_loader,
+ namespace)
+
+ # Generate code
+ cpp_namespace = 'generated_api_schemas'
+ if file_ext == '.h':
+ cpp_code = (h_generator.HGenerator(type_generator)
+ .Generate(namespace).Render())
+ elif file_ext == '.cc':
+ cpp_code = (cc_generator.CCGenerator(type_generator)
+ .Generate(namespace).Render())
+ else:
+ self.send_error(404, "File not found: %s" % request_path)
+ return
+
+ # Do highlighting on the generated code
+ (highlighter_param, style_param) = self._GetHighlighterParams(parsed_url)
+ head.Append('<style>' +
+ self.server.highlighters[highlighter_param].GetCSS(style_param) +
+ '</style>')
+ body.Append(self.server.highlighters[highlighter_param]
+ .GetCodeElement(cpp_code, style_param))
+ except IOError:
+ self.send_error(404, "File not found: %s" % request_path)
+ return
+ except (TypeError, KeyError, AttributeError,
+ AssertionError, NotImplementedError) as error:
+ body.Append('<pre>')
+ body.Append('compiler error: %s' % error)
+ body.Append('Check server log for more details')
+ body.Append('</pre>')
+ raise
+
+ def _GetHighlighterParams(self, parsed_url):
+ """Get the highlighting parameters from a parsed url.
+ """
+ query_dict = urlparse.parse_qs(parsed_url.query)
+ return (query_dict.get('highlighter', ['pygments'])[0],
+ query_dict.get('style', ['colorful'])[0])
+
+ def _RenderNavPane(self, path):
+ """Renders an HTML nav pane.
+
+    This consists of a select element to set the highlight style, and a list
+    of all files at |path| with the appropriate onclick handlers to open
+    either subdirectories or schema (.json/.idl) files.
+ """
+ html = code.Code()
+
+ # Highlighter chooser.
+ html.Append('<select id="highlighters" onChange="updateEverything()">')
+ for name, highlighter in self.server.highlighters.items():
+ html.Append('<option value="%s">%s</option>' %
+ (name, highlighter.DisplayName()))
+ html.Append('</select>')
+
+ html.Append('<br/>')
+
+ # Style for each highlighter.
+ # The correct highlighting will be shown by Javascript.
+ for name, highlighter in self.server.highlighters.items():
+ styles = sorted(highlighter.GetStyles())
+ if not styles:
+ continue
+
+ html.Append('<select class="highlighter_styles" id="%s_styles" '
+ 'onChange="updateEverything()">' % name)
+ for style in styles:
+ html.Append('<option>%s</option>' % style)
+ html.Append('</select>')
+
+ html.Append('<br/>')
+
+ # The files, with appropriate handlers.
+ html.Append('<ul>')
+
+    # Make sure |path| is not empty; it is empty when a URL like
+    # http://localhost:8000 is navigated to.
+ if path == '':
+ path = os.curdir
+
+ # Firstly, a .. link if this isn't the root.
+ if not os.path.samefile(os.curdir, path):
+ normpath = os.path.normpath(os.path.join(path, os.pardir))
+ html.Append('<li><a href="/%s">%s/</a>' % (normpath, os.pardir))
+
+ # Each file under path/
+ for filename in sorted(os.listdir(path)):
+ full_path = os.path.join(path, filename)
+ (file_root, file_ext) = os.path.splitext(full_path)
+ if os.path.isdir(full_path) and not full_path.endswith('.xcodeproj'):
+ html.Append('<li><a href="/%s/">%s/</a>' % (full_path, filename))
+ elif file_ext in ['.json', '.idl']:
+ # cc/h panes will automatically update via the hash change event.
+ html.Append('<li><a href="#%s">%s</a>' %
+ (filename, filename))
+
+ html.Append('</ul>')
+
+ return html.Render()
+
+
+class PreviewHTTPServer(HTTPServer, object):
+ def __init__(self,
+ server_address,
+ handler,
+ highlighters,
+ include_rules,
+ cpp_namespace_pattern):
+ super(PreviewHTTPServer, self).__init__(server_address, handler)
+ self.highlighters = highlighters
+ self.include_rules = include_rules
+ self.cpp_namespace_pattern = cpp_namespace_pattern
+
+
+if __name__ == '__main__':
+ parser = optparse.OptionParser(
+ description='Runs a server to preview the json_schema_compiler output.',
+ usage='usage: %prog [option]...')
+ parser.add_option('-p', '--port', default='8000',
+ help='port to run the server on')
+ parser.add_option('-n', '--namespace', default='generated_api_schemas',
+                    help='C++ namespace for generated files, e.g. extensions::api.')
+ parser.add_option('-I', '--include-rules',
+ help='A list of paths to include when searching for referenced objects,'
+ ' with the namespace separated by a \':\'. Example: '
+ '/foo/bar:Foo::Bar::%(namespace)s')
+
+ (opts, argv) = parser.parse_args()
+
+ def split_path_and_namespace(path_and_namespace):
+ if ':' not in path_and_namespace:
+ raise ValueError('Invalid include rule "%s". Rules must be of '
+ 'the form path:namespace' % path_and_namespace)
+ return path_and_namespace.split(':', 1)
+
+ include_rules = []
+ if opts.include_rules:
+ include_rules = map(split_path_and_namespace,
+ shlex.split(opts.include_rules))
+
+ try:
+    print('Starting preview server on port %s' % opts.port)
+ print('The extension documentation can be found at:')
+ print('')
+ print(' http://localhost:%s/chrome/common/extensions/api' % opts.port)
+ print('')
+
+ highlighters = {
+ 'hilite': hilite_me_highlighter.HiliteMeHighlighter(),
+ 'none': none_highlighter.NoneHighlighter()
+ }
+ try:
+ highlighters['pygments'] = pygments_highlighter.PygmentsHighlighter()
+ except ImportError as e:
+ pass
+
+ server = PreviewHTTPServer(('', int(opts.port)),
+ CompilerHandler,
+ highlighters,
+ include_rules,
+ opts.namespace)
+ server.serve_forever()
+ except KeyboardInterrupt:
+ server.socket.close()
diff --git a/tools/json_schema_compiler/schema_loader.py b/tools/json_schema_compiler/schema_loader.py
new file mode 100644
index 0000000..b896991
--- /dev/null
+++ b/tools/json_schema_compiler/schema_loader.py
@@ -0,0 +1,92 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+import idl_schema
+import json_schema
+from cpp_namespace_environment import CppNamespaceEnvironment
+from model import Model, UnixName
+
+def GenerateFilenames(full_namespace):
+  # Try to find the file defining the namespace. E.g. for
+  # 'nameSpace.sub_name_space.Type' the following heuristic looks for:
+ # 1. name_space_sub_name_space.json,
+ # 2. name_space_sub_name_space.idl,
+ # 3. sub_name_space.json,
+ # 4. sub_name_space.idl,
+ # 5. etc.
+ sub_namespaces = full_namespace.split('.')
+ filenames = [ ]
+ basename = None
+ for namespace in reversed(sub_namespaces):
+ if basename is not None:
+ basename = UnixName(namespace + '.' + basename)
+ else:
+ basename = UnixName(namespace)
+ for ext in ['json', 'idl']:
+ filenames.append('%s.%s' % (basename, ext))
+ return filenames
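+
+# Illustrative output of the heuristic above (hypothetical namespace; the
+# basenames are computed with model.UnixName as defined in model.py):
+#   GenerateFilenames('nameSpace.subNameSpace') ->
+#       ['sub_name_space.json', 'sub_name_space.idl',
+#        'name_space_sub_name_space.json', 'name_space_sub_name_space.idl']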
+
+class SchemaLoader(object):
+ '''Resolves a type name into the namespace the type belongs to.
+
+ Properties:
+ - |root| path to the root directory.
+  - |path| path to the directory with the API schema files, relative to the
+ root.
+ - |include_rules| List containing tuples with (path, cpp_namespace_pattern)
+ used when searching for types.
+ - |cpp_namespace_pattern| Default namespace pattern
+ '''
+ def __init__(self,
+ root,
+ path,
+ include_rules,
+ cpp_namespace_pattern):
+ self._root = root
+ self._include_rules = [(path, cpp_namespace_pattern)]
+ self._include_rules.extend(include_rules)
+
+ def ResolveNamespace(self, full_namespace):
+ filenames = GenerateFilenames(full_namespace)
+ for path, cpp_namespace in self._include_rules:
+ for filename in reversed(filenames):
+        filepath = os.path.join(path, filename)
+ if os.path.exists(os.path.join(self._root, filepath)):
+ return Model().AddNamespace(
+ self.LoadSchema(filepath)[0],
+ filepath,
+ environment=CppNamespaceEnvironment(cpp_namespace))
+ return None
+
+ def ResolveType(self, full_name, default_namespace):
+ name_parts = full_name.rsplit('.', 1)
+ if len(name_parts) == 1:
+ if full_name not in default_namespace.types:
+ return None
+ return default_namespace
+ full_namespace, type_name = full_name.rsplit('.', 1)
+ namespace = self.ResolveNamespace(full_namespace)
+ if namespace and type_name in namespace.types:
+ return namespace
+ return None
+
+ def LoadSchema(self, schema):
+ '''Load a schema definition. The schema parameter must be a file name
+ with the full path relative to the root.'''
+ schema_filename, schema_extension = os.path.splitext(schema)
+
+ schema_path = os.path.join(self._root, schema)
+ if schema_extension == '.json':
+ api_defs = json_schema.Load(schema_path)
+ elif schema_extension == '.idl':
+ api_defs = idl_schema.Load(schema_path)
+ else:
+ sys.exit('Did not recognize file extension %s for schema %s' %
+ (schema_extension, schema))
+
+ return api_defs
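+
+# Illustrative (hypothetical) usage sketch, mirroring how preview.py wires up
+# a SchemaLoader; the paths and namespace string here are placeholders only:
+#   loader = SchemaLoader('./', 'chrome/common/extensions/api', [],
+#                         'extensions::api')
+#   tabs_namespace = loader.ResolveNamespace('tabs')  # finds tabs.json/.idl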
diff --git a/tools/json_schema_compiler/schema_util.py b/tools/json_schema_compiler/schema_util.py
new file mode 100644
index 0000000..b8fb404
--- /dev/null
+++ b/tools/json_schema_compiler/schema_util.py
@@ -0,0 +1,39 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utilies for the processing of schema python structures.
+"""
+
+def CapitalizeFirstLetter(value):
+ return value[0].capitalize() + value[1:]
+
+
+def GetNamespace(ref):
+ return SplitNamespace(ref)[0]
+
+
+def StripNamespace(ref):
+ return SplitNamespace(ref)[1]
+
+
+def SplitNamespace(ref):
+ """Returns (namespace, entity) from |ref|, e.g. app.window.AppWindow ->
+ (app.window, AppWindow). If |ref| isn't qualified then returns (None, ref).
+ """
+ if '.' in ref:
+ return tuple(ref.rsplit('.', 1))
+ return (None, ref)
+
+
+def JsFunctionNameToClassName(namespace_name, function_name):
+ """Transform a fully qualified function name like foo.bar.baz into FooBarBaz
+
+ Also strips any leading 'Experimental' prefix."""
+ parts = []
+ full_name = namespace_name + "." + function_name
+ for part in full_name.split("."):
+ parts.append(CapitalizeFirstLetter(part))
+ if parts[0] == "Experimental":
+ del parts[0]
+ class_name = "".join(parts)
+ return class_name
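+
+# Usage sketch (expected values taken from schema_util_test.py below):
+#   JsFunctionNameToClassName('foo', 'bar')                  -> 'FooBar'
+#   JsFunctionNameToClassName('experimental.foo.bar', 'baz') -> 'FooBarBaz'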
diff --git a/tools/json_schema_compiler/schema_util_test.py b/tools/json_schema_compiler/schema_util_test.py
new file mode 100755
index 0000000..154da01
--- /dev/null
+++ b/tools/json_schema_compiler/schema_util_test.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from schema_util import JsFunctionNameToClassName
+from schema_util import StripNamespace
+import unittest
+
+class SchemaUtilTest(unittest.TestCase):
+ def testStripNamespace(self):
+ self.assertEquals('Bar', StripNamespace('foo.Bar'))
+ self.assertEquals('Baz', StripNamespace('Baz'))
+
+ def testJsFunctionNameToClassName(self):
+ self.assertEquals('FooBar', JsFunctionNameToClassName('foo', 'bar'))
+ self.assertEquals('FooBar',
+ JsFunctionNameToClassName('experimental.foo', 'bar'))
+ self.assertEquals('FooBarBaz',
+ JsFunctionNameToClassName('foo.bar', 'baz'))
+ self.assertEquals('FooBarBaz',
+ JsFunctionNameToClassName('experimental.foo.bar', 'baz'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/json_schema_compiler/test/additional_properties.json b/tools/json_schema_compiler/test/additional_properties.json
new file mode 100644
index 0000000..a40c479
--- /dev/null
+++ b/tools/json_schema_compiler/test/additional_properties.json
@@ -0,0 +1,56 @@
+[
+ {
+ "namespace": "additionalProperties",
+ "description": "The additionalProperties API.",
+ "types": [
+ {
+ "id": "AdditionalPropertiesType",
+ "type": "object",
+ "properties": {
+ "string": {
+ "type": "string",
+ "description": "Some string."
+ }
+ },
+ "additionalProperties": { "type": "any" }
+ }
+ ],
+ "functions": [
+ {
+ "name": "additionalProperties",
+ "type": "function",
+ "description": "Takes an object with additionalProperties",
+ "parameters": [
+ {
+ "name": "paramObject",
+ "type": "object",
+ "properties": {},
+ "additionalProperties": {"type": "any"}
+ }
+ ]
+ },
+ {
+ "name": "returnAdditionalProperties",
+ "type": "function",
+ "description": "Returns an object with additionalProperties.",
+ "nodoc": "true",
+ "parameters": [
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "resultObject",
+ "type": "object",
+ "properties": {
+ "integer": {"type": "integer"}
+ },
+ "additionalProperties": {"type": "string"}
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/additional_properties_unittest.cc b/tools/json_schema_compiler/test/additional_properties_unittest.cc
new file mode 100644
index 0000000..dc980af
--- /dev/null
+++ b/tools/json_schema_compiler/test/additional_properties_unittest.cc
@@ -0,0 +1,64 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/json_schema_compiler/test/additional_properties.h"
+
+using namespace test::api::additional_properties;
+
+TEST(JsonSchemaCompilerAdditionalPropertiesTest,
+ AdditionalPropertiesTypePopulate) {
+ {
+ scoped_ptr<base::ListValue> list_value(new base::ListValue());
+ list_value->Append(new base::StringValue("asdf"));
+ list_value->Append(new base::FundamentalValue(4));
+ scoped_ptr<base::DictionaryValue> type_value(new base::DictionaryValue());
+ type_value->SetString("string", "value");
+ type_value->SetInteger("other", 9);
+ type_value->Set("another", list_value.release());
+ scoped_ptr<AdditionalPropertiesType> type(new AdditionalPropertiesType());
+ ASSERT_TRUE(AdditionalPropertiesType::Populate(*type_value, type.get()));
+ EXPECT_TRUE(type->additional_properties.Equals(type_value.get()));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> type_value(new base::DictionaryValue());
+ type_value->SetInteger("string", 3);
+ scoped_ptr<AdditionalPropertiesType> type(new AdditionalPropertiesType());
+ EXPECT_FALSE(AdditionalPropertiesType::Populate(*type_value, type.get()));
+ }
+}
+
+TEST(JsonSchemaCompilerAdditionalPropertiesTest,
+ AdditionalPropertiesParamsCreate) {
+ scoped_ptr<base::DictionaryValue> param_object_value(
+ new base::DictionaryValue());
+ param_object_value->SetString("str", "a");
+ param_object_value->SetInteger("num", 1);
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(param_object_value->DeepCopy());
+ scoped_ptr<AdditionalProperties::Params> params(
+ AdditionalProperties::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_TRUE(params->param_object.additional_properties.Equals(
+ param_object_value.get()));
+}
+
+TEST(JsonSchemaCompilerAdditionalPropertiesTest,
+ ReturnAdditionalPropertiesResultCreate) {
+ ReturnAdditionalProperties::Results::ResultObject result_object;
+ result_object.integer = 5;
+ result_object.additional_properties["key"] = "value";
+
+ base::ListValue expected;
+ {
+ base::DictionaryValue* dict = new base::DictionaryValue();
+ dict->SetInteger("integer", 5);
+ dict->SetString("key", "value");
+ expected.Append(dict);
+ }
+
+ EXPECT_TRUE(base::Value::Equals(
+ ReturnAdditionalProperties::Results::Create(result_object).get(),
+ &expected));
+}
diff --git a/tools/json_schema_compiler/test/any.json b/tools/json_schema_compiler/test/any.json
new file mode 100644
index 0000000..d836a04
--- /dev/null
+++ b/tools/json_schema_compiler/test/any.json
@@ -0,0 +1,68 @@
+[
+ {
+ "namespace": "any",
+ "description": "The any API.",
+ "types": [
+ {
+ "id": "AnyType",
+ "type": "object",
+ "properties": {
+ "any": {
+ "type": "any",
+ "description": "Any way you want it, that's the way you need it."
+ }
+ }
+ }
+ ],
+ "functions": [
+ {
+ "name": "optionalAny",
+ "type": "function",
+ "description": "Takes an optional any param.",
+ "parameters": [
+ {
+ "type": "any",
+ "name": "anyName",
+ "optional": true
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "returnAny",
+ "type": "function",
+ "description": "Returns any.",
+ "nodoc": "true",
+ "parameters": [
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "result",
+ "type": "any"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "onAnyFired",
+ "type": "function",
+ "description": "Fired when anything is ready.",
+ "parameters": [
+ {
+ "name": "something",
+ "type": "any"
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/any_unittest.cc b/tools/json_schema_compiler/test/any_unittest.cc
new file mode 100644
index 0000000..d10a3eb
--- /dev/null
+++ b/tools/json_schema_compiler/test/any_unittest.cc
@@ -0,0 +1,59 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/json_schema_compiler/test/any.h"
+
+using namespace test::api::any;
+
+TEST(JsonSchemaCompilerAnyTest, AnyTypePopulate) {
+ {
+ AnyType any_type;
+ scoped_ptr<base::DictionaryValue> any_type_value(
+ new base::DictionaryValue());
+ any_type_value->SetString("any", "value");
+ EXPECT_TRUE(AnyType::Populate(*any_type_value, &any_type));
+ scoped_ptr<base::Value> any_type_to_value(any_type.ToValue());
+ EXPECT_TRUE(any_type_value->Equals(any_type_to_value.get()));
+ }
+ {
+ AnyType any_type;
+ scoped_ptr<base::DictionaryValue> any_type_value(
+ new base::DictionaryValue());
+ any_type_value->SetInteger("any", 5);
+ EXPECT_TRUE(AnyType::Populate(*any_type_value, &any_type));
+ scoped_ptr<base::Value> any_type_to_value(any_type.ToValue());
+ EXPECT_TRUE(any_type_value->Equals(any_type_to_value.get()));
+ }
+}
+
+TEST(JsonSchemaCompilerAnyTest, OptionalAnyParamsCreate) {
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<OptionalAny::Params> params(
+ OptionalAny::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_FALSE(params->any_name.get());
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::Value> param(new base::StringValue("asdf"));
+ params_value->Append(param->DeepCopy());
+ scoped_ptr<OptionalAny::Params> params(
+ OptionalAny::Params::Create(*params_value));
+ ASSERT_TRUE(params);
+ ASSERT_TRUE(params->any_name);
+ EXPECT_TRUE(params->any_name->Equals(param.get()));
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::Value> param(new base::FundamentalValue(true));
+ params_value->Append(param->DeepCopy());
+ scoped_ptr<OptionalAny::Params> params(
+ OptionalAny::Params::Create(*params_value));
+ ASSERT_TRUE(params);
+ ASSERT_TRUE(params->any_name);
+ EXPECT_TRUE(params->any_name->Equals(param.get()));
+ }
+}
diff --git a/tools/json_schema_compiler/test/arrays.json b/tools/json_schema_compiler/test/arrays.json
new file mode 100644
index 0000000..23314e9
--- /dev/null
+++ b/tools/json_schema_compiler/test/arrays.json
@@ -0,0 +1,274 @@
+[
+ {
+ "namespace": "arrays",
+ "description": "The arrays API.",
+ "types": [
+ {
+ "id": "EnumArrayType",
+ "type": "object",
+ "properties": {
+ "types": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": ["one", "two", "three"]
+ }
+ }
+ }
+ },
+ {
+ "id": "Enumeration",
+ "type": "string",
+ "enum": ["one", "two", "three"]
+ },
+ {
+ "id": "EnumArrayReference",
+ "type": "object",
+ "properties": {
+ "types": {
+ "type": "array",
+ "items": {
+ "$ref": "Enumeration"
+ }
+ }
+ }
+ },
+ {
+ "id": "EnumArrayMixed",
+ "type": "object",
+ "properties": {
+ "inline_enums": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": ["one", "two", "three"]
+ }
+ },
+ "infile_enums": {
+ "type": "array",
+ "items": {
+ "$ref": "Enumeration"
+ }
+ },
+ "external_enums": {
+ "type": "array",
+ "items": {
+ "$ref": "enums.Enumeration"
+ }
+ }
+ }
+ },
+ {
+ "id": "OptionalEnumArrayType",
+ "type": "object",
+ "properties": {
+ "types": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": ["one", "two", "three"]
+ },
+ "optional": true
+ }
+ }
+ },
+ {
+ "id": "BasicArrayType",
+ "type": "object",
+ "properties": {
+ "strings": {
+ "type": "array",
+ "items": {"type": "string"}
+ },
+ "booleans": {
+ "type": "array",
+ "items": {"type": "boolean"}
+ },
+ "numbers": {
+ "type": "array",
+ "items": {"type": "number"}
+ },
+ "integers": {
+ "type": "array",
+ "items": {"type": "integer"}
+ }
+ }
+ },
+ {
+ "id": "Item",
+ "type": "object",
+ "properties": {
+ "val": {
+ "type": "integer"
+ }
+ }
+ },
+ {
+ "id": "RefArrayType",
+ "type": "object",
+ "properties": {
+ "refs": {
+ "type": "array",
+ "items": { "$ref": "Item" }
+ }
+ }
+ }
+ ],
+ "functions": [
+ {
+ "name": "integerArray",
+ "type": "function",
+ "description": "Takes some integers.",
+ "parameters": [
+ {
+ "name": "nums",
+ "type": "array",
+ "items": {"type": "integer"}
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "anyArray",
+ "type": "function",
+ "description": "Takes some Items.",
+ "parameters": [
+ {
+ "name": "anys",
+ "type": "array",
+ "items": {"type": "any"}
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "objectArray",
+ "type": "function",
+ "description": "Takes some Items.",
+ "parameters": [
+ {
+ "name": "objects",
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": {"type": "integer"}
+ }
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "refArray",
+ "type": "function",
+ "description": "Takes some Items.",
+ "parameters": [
+ {
+ "name": "refs",
+ "type": "array",
+ "items": {"$ref": "Item"}
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "justChoices",
+ "type": "function",
+ "description": "Takes some Choices.",
+ "parameters": [
+ {
+ "name": "choices",
+ "choices": [
+ { "type": "integer" },
+ { "type": "boolean" },
+ { "type": "array",
+ "items": {"$ref": "Item"}
+ }
+ ]
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "choicesArray",
+ "type": "function",
+ "description": "Takes some Choices.",
+ "parameters": [
+ {
+ "name": "choices",
+ "type": "array",
+ "items": {
+ "choices": [
+ { "type": "integer" },
+ { "type": "boolean" },
+ { "type": "array",
+ "items": {"$ref": "Item"}
+ }
+ ]
+ }
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "returnIntegerArray",
+ "type": "function",
+ "description": "Returns some integers.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "integers",
+ "type": "array",
+ "items": {"type": "integer"}
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "returnRefArray",
+ "type": "function",
+ "description": "Returns some Items.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "refs",
+ "type": "array",
+ "items": {"$ref": "Item"}
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/arrays_unittest.cc b/tools/json_schema_compiler/test/arrays_unittest.cc
new file mode 100644
index 0000000..79ebfb2
--- /dev/null
+++ b/tools/json_schema_compiler/test/arrays_unittest.cc
@@ -0,0 +1,317 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/arrays.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/json_schema_compiler/test/enums.h"
+
+using namespace test::api::arrays;
+
+namespace {
+
+// TODO(calamity): Change to AppendString etc once kalman's patch goes through
+static scoped_ptr<base::DictionaryValue> CreateBasicArrayTypeDictionary() {
+ base::DictionaryValue* value = new base::DictionaryValue();
+ base::ListValue* strings_value = new base::ListValue();
+ strings_value->Append(new base::StringValue("a"));
+ strings_value->Append(new base::StringValue("b"));
+ strings_value->Append(new base::StringValue("c"));
+ strings_value->Append(new base::StringValue("it's easy as"));
+ base::ListValue* integers_value = new base::ListValue();
+ integers_value->Append(new base::FundamentalValue(1));
+ integers_value->Append(new base::FundamentalValue(2));
+ integers_value->Append(new base::FundamentalValue(3));
+ base::ListValue* booleans_value = new base::ListValue();
+ booleans_value->Append(new base::FundamentalValue(false));
+ booleans_value->Append(new base::FundamentalValue(true));
+ base::ListValue* numbers_value = new base::ListValue();
+ numbers_value->Append(new base::FundamentalValue(6.1));
+ value->Set("numbers", numbers_value);
+ value->Set("booleans", booleans_value);
+ value->Set("strings", strings_value);
+ value->Set("integers", integers_value);
+ return scoped_ptr<base::DictionaryValue>(value);
+}
+
+static base::Value* CreateItemValue(int val) {
+ base::DictionaryValue* value(new base::DictionaryValue());
+ value->Set("val", new base::FundamentalValue(val));
+ return value;
+}
+
+} // namespace
+
+TEST(JsonSchemaCompilerArrayTest, BasicArrayType) {
+ {
+ scoped_ptr<base::DictionaryValue> value = CreateBasicArrayTypeDictionary();
+ scoped_ptr<BasicArrayType> basic_array_type(new BasicArrayType());
+ ASSERT_TRUE(BasicArrayType::Populate(*value, basic_array_type.get()));
+ EXPECT_TRUE(value->Equals(basic_array_type->ToValue().get()));
+ }
+}
+
+TEST(JsonSchemaCompilerArrayTest, EnumArrayType) {
+ // { "types": ["one", "two", "three"] }
+ base::ListValue* types = new base::ListValue();
+ types->AppendString("one");
+ types->AppendString("two");
+ types->AppendString("three");
+ base::DictionaryValue value;
+ value.Set("types", types);
+
+ EnumArrayType enum_array_type;
+
+ // Test Populate.
+ ASSERT_TRUE(EnumArrayType::Populate(value, &enum_array_type));
+ {
+ EnumArrayType::TypesType enums[] = {
+ EnumArrayType::TYPES_TYPE_ONE,
+ EnumArrayType::TYPES_TYPE_TWO,
+ EnumArrayType::TYPES_TYPE_THREE,
+ };
+ std::vector<EnumArrayType::TypesType> enums_vector(
+ enums, enums + arraysize(enums));
+ EXPECT_EQ(enums_vector, enum_array_type.types);
+ }
+
+ // Test ToValue.
+ scoped_ptr<base::Value> as_value(enum_array_type.ToValue());
+ EXPECT_TRUE(value.Equals(as_value.get())) << value << " != " << *as_value;
+}
+
+TEST(JsonSchemaCompilerArrayTest, EnumArrayReference) {
+ // { "types": ["one", "two", "three"] }
+ base::ListValue* types = new base::ListValue();
+ types->AppendString("one");
+ types->AppendString("two");
+ types->AppendString("three");
+ base::DictionaryValue value;
+ value.Set("types", types);
+
+ EnumArrayReference enum_array_reference;
+
+ // Test Populate.
+ ASSERT_TRUE(EnumArrayReference::Populate(value, &enum_array_reference));
+
+ Enumeration expected_types[] = {ENUMERATION_ONE, ENUMERATION_TWO,
+ ENUMERATION_THREE};
+ EXPECT_EQ(std::vector<Enumeration>(
+ expected_types, expected_types + arraysize(expected_types)),
+ enum_array_reference.types);
+
+ // Test ToValue.
+ scoped_ptr<base::Value> as_value(enum_array_reference.ToValue());
+ EXPECT_TRUE(value.Equals(as_value.get())) << value << " != " << *as_value;
+}
+
+TEST(JsonSchemaCompilerArrayTest, EnumArrayMixed) {
+ // { "types": ["one", "two", "three"] }
+ base::ListValue* inline_enums = new base::ListValue();
+ inline_enums->AppendString("one");
+ inline_enums->AppendString("two");
+ inline_enums->AppendString("three");
+
+ base::ListValue* infile_enums = new base::ListValue();
+ infile_enums->AppendString("one");
+ infile_enums->AppendString("two");
+ infile_enums->AppendString("three");
+
+ base::ListValue* external_enums = new base::ListValue();
+ external_enums->AppendString("one");
+ external_enums->AppendString("two");
+ external_enums->AppendString("three");
+
+ base::DictionaryValue value;
+ value.Set("inline_enums", inline_enums);
+ value.Set("infile_enums", infile_enums);
+ value.Set("external_enums", external_enums);
+
+ EnumArrayMixed enum_array_mixed;
+
+ // Test Populate.
+ ASSERT_TRUE(EnumArrayMixed::Populate(value, &enum_array_mixed));
+
+ EnumArrayMixed::Inline_enumsType expected_inline_types[] = {
+ EnumArrayMixed::INLINE_ENUMS_TYPE_ONE,
+ EnumArrayMixed::INLINE_ENUMS_TYPE_TWO,
+ EnumArrayMixed::INLINE_ENUMS_TYPE_THREE};
+ EXPECT_EQ(std::vector<EnumArrayMixed::Inline_enumsType>(
+ expected_inline_types,
+ expected_inline_types + arraysize(expected_inline_types)),
+ enum_array_mixed.inline_enums);
+
+ Enumeration expected_infile_types[] = {ENUMERATION_ONE, ENUMERATION_TWO,
+ ENUMERATION_THREE};
+ EXPECT_EQ(std::vector<Enumeration>(
+ expected_infile_types,
+ expected_infile_types + arraysize(expected_infile_types)),
+ enum_array_mixed.infile_enums);
+
+ test::api::enums::Enumeration expected_external_types[] = {
+ test::api::enums::ENUMERATION_ONE, test::api::enums::ENUMERATION_TWO,
+ test::api::enums::ENUMERATION_THREE};
+ EXPECT_EQ(std::vector<test::api::enums::Enumeration>(
+ expected_external_types,
+ expected_external_types + arraysize(expected_external_types)),
+ enum_array_mixed.external_enums);
+
+ // Test ToValue.
+ scoped_ptr<base::Value> as_value(enum_array_mixed.ToValue());
+ EXPECT_TRUE(value.Equals(as_value.get())) << value << " != " << *as_value;
+}
+
+TEST(JsonSchemaCompilerArrayTest, OptionalEnumArrayType) {
+ {
+ std::vector<OptionalEnumArrayType::TypesType> enums;
+ enums.push_back(OptionalEnumArrayType::TYPES_TYPE_ONE);
+ enums.push_back(OptionalEnumArrayType::TYPES_TYPE_TWO);
+ enums.push_back(OptionalEnumArrayType::TYPES_TYPE_THREE);
+
+ scoped_ptr<base::ListValue> types(new base::ListValue());
+ for (size_t i = 0; i < enums.size(); ++i) {
+ types->Append(new base::StringValue(
+ OptionalEnumArrayType::ToString(enums[i])));
+ }
+
+ base::DictionaryValue value;
+ value.Set("types", types.release());
+
+ OptionalEnumArrayType enum_array_type;
+ ASSERT_TRUE(OptionalEnumArrayType::Populate(value, &enum_array_type));
+ EXPECT_EQ(enums, *enum_array_type.types);
+ }
+ {
+ base::DictionaryValue value;
+ scoped_ptr<base::ListValue> enum_array(new base::ListValue());
+ enum_array->Append(new base::StringValue("invalid"));
+
+ value.Set("types", enum_array.release());
+ OptionalEnumArrayType enum_array_type;
+ ASSERT_FALSE(OptionalEnumArrayType::Populate(value, &enum_array_type));
+ EXPECT_TRUE(enum_array_type.types->empty());
+ }
+}
+
+TEST(JsonSchemaCompilerArrayTest, RefArrayType) {
+ {
+ scoped_ptr<base::DictionaryValue> value(new base::DictionaryValue());
+ scoped_ptr<base::ListValue> ref_array(new base::ListValue());
+ ref_array->Append(CreateItemValue(1));
+ ref_array->Append(CreateItemValue(2));
+ ref_array->Append(CreateItemValue(3));
+ value->Set("refs", ref_array.release());
+ scoped_ptr<RefArrayType> ref_array_type(new RefArrayType());
+ EXPECT_TRUE(RefArrayType::Populate(*value, ref_array_type.get()));
+ ASSERT_EQ(3u, ref_array_type->refs.size());
+ EXPECT_EQ(1, ref_array_type->refs[0]->val);
+ EXPECT_EQ(2, ref_array_type->refs[1]->val);
+ EXPECT_EQ(3, ref_array_type->refs[2]->val);
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value(new base::DictionaryValue());
+ scoped_ptr<base::ListValue> not_ref_array(new base::ListValue());
+ not_ref_array->Append(CreateItemValue(1));
+ not_ref_array->Append(new base::FundamentalValue(3));
+ value->Set("refs", not_ref_array.release());
+ scoped_ptr<RefArrayType> ref_array_type(new RefArrayType());
+ EXPECT_FALSE(RefArrayType::Populate(*value, ref_array_type.get()));
+ }
+}
+
+TEST(JsonSchemaCompilerArrayTest, IntegerArrayParamsCreate) {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::ListValue> integer_array(new base::ListValue());
+ integer_array->Append(new base::FundamentalValue(2));
+ integer_array->Append(new base::FundamentalValue(4));
+ integer_array->Append(new base::FundamentalValue(8));
+ params_value->Append(integer_array.release());
+ scoped_ptr<IntegerArray::Params> params(
+ IntegerArray::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ ASSERT_EQ(3u, params->nums.size());
+ EXPECT_EQ(2, params->nums[0]);
+ EXPECT_EQ(4, params->nums[1]);
+ EXPECT_EQ(8, params->nums[2]);
+}
+
+TEST(JsonSchemaCompilerArrayTest, AnyArrayParamsCreate) {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::ListValue> any_array(new base::ListValue());
+ any_array->Append(new base::FundamentalValue(1));
+ any_array->Append(new base::StringValue("test"));
+ any_array->Append(CreateItemValue(2));
+ params_value->Append(any_array.release());
+ scoped_ptr<AnyArray::Params> params(
+ AnyArray::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ ASSERT_EQ(3u, params->anys.size());
+ int int_temp = 0;
+ EXPECT_TRUE(params->anys[0]->GetAsInteger(&int_temp));
+ EXPECT_EQ(1, int_temp);
+}
+
+TEST(JsonSchemaCompilerArrayTest, ObjectArrayParamsCreate) {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::ListValue> item_array(new base::ListValue());
+ item_array->Append(CreateItemValue(1));
+ item_array->Append(CreateItemValue(2));
+ params_value->Append(item_array.release());
+ scoped_ptr<ObjectArray::Params> params(
+ ObjectArray::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ ASSERT_EQ(2u, params->objects.size());
+ EXPECT_EQ(1, params->objects[0]->additional_properties["val"]);
+ EXPECT_EQ(2, params->objects[1]->additional_properties["val"]);
+}
+
+TEST(JsonSchemaCompilerArrayTest, RefArrayParamsCreate) {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::ListValue> item_array(new base::ListValue());
+ item_array->Append(CreateItemValue(1));
+ item_array->Append(CreateItemValue(2));
+ params_value->Append(item_array.release());
+ scoped_ptr<RefArray::Params> params(
+ RefArray::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ ASSERT_EQ(2u, params->refs.size());
+ EXPECT_EQ(1, params->refs[0]->val);
+ EXPECT_EQ(2, params->refs[1]->val);
+}
+
+TEST(JsonSchemaCompilerArrayTest, ReturnIntegerArrayResultCreate) {
+ std::vector<int> integers;
+ integers.push_back(1);
+ integers.push_back(2);
+ scoped_ptr<base::ListValue> results =
+ ReturnIntegerArray::Results::Create(integers);
+
+ base::ListValue expected;
+ base::ListValue* expected_argument = new base::ListValue();
+ expected_argument->Append(new base::FundamentalValue(1));
+ expected_argument->Append(new base::FundamentalValue(2));
+ expected.Append(expected_argument);
+ EXPECT_TRUE(results->Equals(&expected));
+}
+
+TEST(JsonSchemaCompilerArrayTest, ReturnRefArrayResultCreate) {
+ std::vector<linked_ptr<Item> > items;
+ items.push_back(linked_ptr<Item>(new Item()));
+ items.push_back(linked_ptr<Item>(new Item()));
+ items[0]->val = 1;
+ items[1]->val = 2;
+ scoped_ptr<base::ListValue> results =
+ ReturnRefArray::Results::Create(items);
+
+ base::ListValue expected;
+ base::ListValue* expected_argument = new base::ListValue();
+ base::DictionaryValue* first = new base::DictionaryValue();
+ first->SetInteger("val", 1);
+ expected_argument->Append(first);
+ base::DictionaryValue* second = new base::DictionaryValue();
+ second->SetInteger("val", 2);
+ expected_argument->Append(second);
+ expected.Append(expected_argument);
+ EXPECT_TRUE(results->Equals(&expected));
+}
diff --git a/tools/json_schema_compiler/test/browser_action.json b/tools/json_schema_compiler/test/browser_action.json
new file mode 100644
index 0000000..934d56d
--- /dev/null
+++ b/tools/json_schema_compiler/test/browser_action.json
@@ -0,0 +1,273 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+[
+ {
+ "namespace": "browserAction",
+ "description": "The browserAction API.",
+ "dependencies": [ "tabs" ],
+ "types": [
+ {
+ "id": "ColorArray",
+ "type": "array",
+ "items": {
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 255
+ },
+ "minItems": 4,
+ "maxItems": 4
+ }
+ ],
+ "functions": [
+ {
+ "name": "setTitle",
+ "type": "function",
+ "description": "Sets the title of the browser action. This shows up in the tooltip.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "title": {
+ "type": "string",
+ "description": "The string the browser action should display when moused over."
+ },
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "description": "Limits the change to when a particular tab is selected. Automatically resets when the tab is closed."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "getTitle",
+ "type": "function",
+ "description": "Gets the title of the browser action.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "description": "Specify the tab to get the title from. If no tab is specified, the non-tab-specific title is returned."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "result",
+ "type": "string"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "setIcon",
+ "type": "function",
+ "description": "Sets the icon for the browser action. The icon can be specified either as the path to an image file or as the pixel data from a canvas element. Either the <b>path</b> or the <b>imageData</b> property must be specified.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "imageData": {
+ "type": "object",
+ "isInstanceOf": "ImageData",
+ "additionalProperties": { "type": "any" },
+ "description": "Pixel data for an image. Must be an ImageData object (for example, from a <code>canvas</code> element).",
+ "optional": true
+ },
+ "path": {
+ "type": "string",
+ "description": "Relative path to an image in the extension to show in the browser action.",
+ "optional": true
+ },
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "description": "Limits the change to when a particular tab is selected. Automatically resets when the tab is closed."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "setPopup",
+ "type": "function",
+ "description": "Sets the html document to be opened as a popup when the user clicks on the browser action's icon.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "minimum": 0,
+ "description": "Limits the change to when a particular tab is selected. Automatically resets when the tab is closed."
+ },
+ "popup": {
+ "type": "string",
+ "description": "The html file to show in a popup. If set to the empty string (''), no popup is shown."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "getPopup",
+ "type": "function",
+ "description": "Gets the html document set as the popup for this browser action.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "description": "Specify the tab to get the popup from. If no tab is specified, the non-tab-specific popup is returned."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "result",
+ "type": "string"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "setBadgeText",
+ "type": "function",
+ "description": "Sets the badge text for the browser action. The badge is displayed on top of the icon.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "text": {
+ "type": "string",
+ "description": "Any number of characters can be passed, but only about four can fit in the space."
+ },
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "description": "Limits the change to when a particular tab is selected. Automatically resets when the tab is closed."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "getBadgeText",
+ "type": "function",
+ "description": "Gets the badge text of the browser action. If no tab is specified, the non-tab-specific badge text is returned.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "description": "Specify the tab to get the badge text from. If no tab is specified, the non-tab-specific badge text is returned."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "result",
+ "type": "string"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "setBadgeBackgroundColor",
+ "type": "function",
+ "description": "Sets the background color for the badge.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "color": {
+ "description": "An array of four integers in the range [0,255] that make up the RGBA color of the badge. For example, opaque red is <code>[255, 0, 0, 255]</code>. Can also be a string with a CSS value, with opaque red being <code>#FF0000</code> or <code>#F00</code>.",
+ "choices": [
+ {"type": "string"},
+ {"$ref": "ColorArray"}
+ ]
+ },
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "description": "Limits the change to when a particular tab is selected. Automatically resets when the tab is closed."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "getBadgeBackgroundColor",
+ "type": "function",
+ "description": "Gets the background color of the browser action.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "tabId": {
+ "type": "integer",
+ "optional": true,
+ "description": "Specify the tab to get the badge background color from. If no tab is specified, the non-tab-specific badge background color is returned."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "result",
+ "$ref": "ColorArray"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "onClicked",
+ "type": "function",
+ "description": "Fired when a browser action icon is clicked. This event will not fire if the browser action has a popup.",
+ "parameters": [
+ {
+ "name": "tab",
+ "$ref": "Tab"
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/callbacks.json b/tools/json_schema_compiler/test/callbacks.json
new file mode 100644
index 0000000..2f86c26
--- /dev/null
+++ b/tools/json_schema_compiler/test/callbacks.json
@@ -0,0 +1,72 @@
+[
+ {
+ "namespace": "callbacks",
+ "description": "The callbacks API.",
+ "types": [],
+ "functions": [
+ {
+ "name": "returnsNothing",
+ "type": "function",
+ "description": "Takes nothing. Returns nothing.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "returnsObject",
+ "description": "Returns an object.",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "someObject",
+ "type": "object",
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "returnsMultiple",
+        "description": "Returns an integer and an object.",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "someInteger",
+ "type": "integer"
+ },
+ {
+ "name": "someObject",
+ "type": "object",
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+]
+
diff --git a/tools/json_schema_compiler/test/callbacks_unittest.cc b/tools/json_schema_compiler/test/callbacks_unittest.cc
new file mode 100644
index 0000000..001e977
--- /dev/null
+++ b/tools/json_schema_compiler/test/callbacks_unittest.cc
@@ -0,0 +1,36 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/callbacks.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using namespace test::api::callbacks;
+
+TEST(JsonSchemaCompilerCallbacksTest, ReturnsObjectResultCreate) {
+ ReturnsObject::Results::SomeObject some_object;
+ some_object.state = ReturnsObject::Results::SomeObject::STATE_FOO;
+ scoped_ptr<base::ListValue> results =
+ ReturnsObject::Results::Create(some_object);
+
+ base::DictionaryValue* expected_dict = new base::DictionaryValue();
+ expected_dict->SetString("state", "foo");
+ base::ListValue expected;
+ expected.Append(expected_dict);
+ EXPECT_TRUE(results->Equals(&expected));
+}
+
+TEST(JsonSchemaCompilerCallbacksTest, ReturnsMultipleResultCreate) {
+ ReturnsMultiple::Results::SomeObject some_object;
+ some_object.state = ReturnsMultiple::Results::SomeObject::STATE_FOO;
+ scoped_ptr<base::ListValue> results =
+ ReturnsMultiple::Results::Create(5, some_object);
+
+ base::DictionaryValue* expected_dict = new base::DictionaryValue();
+ expected_dict->SetString("state", "foo");
+ base::ListValue expected;
+ expected.Append(new base::FundamentalValue(5));
+ expected.Append(expected_dict);
+ EXPECT_TRUE(results->Equals(&expected));
+}
diff --git a/tools/json_schema_compiler/test/choices.json b/tools/json_schema_compiler/test/choices.json
new file mode 100644
index 0000000..e7e39e0
--- /dev/null
+++ b/tools/json_schema_compiler/test/choices.json
@@ -0,0 +1,167 @@
+[
+ {
+ "namespace": "choices",
+ "description": "The choices API.",
+ "types": [
+ {
+ "id": "ChoiceType",
+ "type": "object",
+ "properties": {
+ "integers": {
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ]
+ },
+ "strings": {
+ "choices": [
+ {"type": "array", "items": {"type": "string", "minimum": 0}},
+ {"type": "string"}
+ ],
+ "optional": true
+ }
+ }
+ },
+ {
+ "id": "NestedChoice",
+ "description": "Tests when some of the choices are choices themselves",
+ "choices": [
+ {"type": "integer"},
+ {"choices": [
+ {"type": "string"},
+ {"type": "boolean"}
+ ]},
+ {"choices": [
+ {"type": "double"},
+ {"$ref": "ChoiceType"},
+ {"type": "array", "items": {"$ref": "ChoiceType"}}
+ ]}
+ ]
+ }
+ ],
+ "functions": [
+ {
+ "name": "takesIntegers",
+ "type": "function",
+ "description": "Takes one or more integers.",
+ "parameters": [
+ {
+ "name": "nums",
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ]
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "takesIntegersOptional",
+ "type": "function",
+ "description": "Takes one or more integers.",
+ "parameters": [
+ {
+ "name": "nums",
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ],
+ "optional": true
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "objectWithChoices",
+ "type": "function",
+ "description": "Takes an object with one or more strings and optional integer(s).",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "stringInfo",
+ "properties": {
+ "strings": {
+                "description": "One or more strings.",
+ "choices": [
+ {"type": "array", "items": {"type": "string", "minimum": 0}},
+ {"type": "string"}
+ ]
+ },
+ "integers": {
+                "description": "One or more integers.",
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ],
+ "optional": true
+ }
+ }
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "returnChoices",
+ "type": "function",
+        "description": "Gives back an integer or a list of integers.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "result",
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ],
+ "description": "Some integers."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "returnMultipleChoices",
+ "type": "function",
+ "description": "Gives back two values where each is an integer or a list of integers.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "firstResult",
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ],
+ "description": "Some integers."
+ },
+ {
+ "name": "secondResult",
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ],
+ "description": "Some integers."
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/choices_unittest.cc b/tools/json_schema_compiler/test/choices_unittest.cc
new file mode 100644
index 0000000..2cf1d8d
--- /dev/null
+++ b/tools/json_schema_compiler/test/choices_unittest.cc
@@ -0,0 +1,292 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/choices.h"
+
+#include "base/strings/string_piece.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/json_schema_compiler/test/test_util.h"
+
+namespace {
+
+using namespace test::api::choices;
+using json_schema_compiler::test_util::Dictionary;
+using json_schema_compiler::test_util::List;
+using json_schema_compiler::test_util::ReadJson;
+using json_schema_compiler::test_util::Vector;
+
+TEST(JsonSchemaCompilerChoicesTest, TakesIntegersParamsCreate) {
+ {
+ scoped_ptr<TakesIntegers::Params> params(
+ TakesIntegers::Params::Create(*List(new base::FundamentalValue(true))));
+ EXPECT_FALSE(params);
+ }
+ {
+ scoped_ptr<TakesIntegers::Params> params(
+ TakesIntegers::Params::Create(*List(new base::FundamentalValue(6))));
+ ASSERT_TRUE(params);
+ EXPECT_FALSE(params->nums.as_integers);
+ EXPECT_EQ(6, *params->nums.as_integer);
+ }
+ {
+ scoped_ptr<TakesIntegers::Params> params(TakesIntegers::Params::Create(
+ *List(List(new base::FundamentalValue(2),
+ new base::FundamentalValue(6),
+ new base::FundamentalValue(8)).release())));
+ ASSERT_TRUE(params);
+ ASSERT_TRUE(params->nums.as_integers);
+ EXPECT_EQ(Vector(2, 6, 8), *params->nums.as_integers);
+ }
+}
+
+TEST(JsonSchemaCompilerChoicesTest, ObjectWithChoicesParamsCreate) {
+ {
+ scoped_ptr<ObjectWithChoices::Params> params(
+ ObjectWithChoices::Params::Create(*List(
+ Dictionary("strings", new base::StringValue("asdf")).release())));
+ ASSERT_TRUE(params);
+ EXPECT_FALSE(params->string_info.strings.as_strings);
+ EXPECT_EQ("asdf", *params->string_info.strings.as_string);
+ EXPECT_FALSE(params->string_info.integers);
+ }
+ {
+ scoped_ptr<ObjectWithChoices::Params> params(
+ ObjectWithChoices::Params::Create(*List(
+ Dictionary("strings", new base::StringValue("asdf"),
+ "integers", new base::FundamentalValue(6)).release())));
+ ASSERT_TRUE(params);
+ EXPECT_FALSE(params->string_info.strings.as_strings);
+ EXPECT_EQ("asdf", *params->string_info.strings.as_string);
+ ASSERT_TRUE(params->string_info.integers);
+ EXPECT_FALSE(params->string_info.integers->as_integers);
+ EXPECT_EQ(6, *params->string_info.integers->as_integer);
+ }
+}
+
+// TODO(kalman): Clean up the rest of these tests to use the
+// Vector/List/Dictionary helpers.
+
+TEST(JsonSchemaCompilerChoicesTest, ObjectWithChoicesParamsCreateFail) {
+ {
+ scoped_ptr<base::DictionaryValue> object_param(new base::DictionaryValue());
+ object_param->SetWithoutPathExpansion("strings",
+ new base::FundamentalValue(5));
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(object_param.release());
+ scoped_ptr<ObjectWithChoices::Params> params(
+ ObjectWithChoices::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+ {
+ scoped_ptr<base::DictionaryValue> object_param(new base::DictionaryValue());
+ object_param->SetWithoutPathExpansion("strings",
+ new base::StringValue("asdf"));
+ object_param->SetWithoutPathExpansion("integers",
+ new base::StringValue("asdf"));
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(object_param.release());
+ scoped_ptr<ObjectWithChoices::Params> params(
+ ObjectWithChoices::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+ {
+ scoped_ptr<base::DictionaryValue> object_param(new base::DictionaryValue());
+ object_param->SetWithoutPathExpansion("integers",
+ new base::FundamentalValue(6));
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(object_param.release());
+ scoped_ptr<ObjectWithChoices::Params> params(
+ ObjectWithChoices::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+}
+
+TEST(JsonSchemaCompilerChoicesTest, PopulateChoiceType) {
+ std::vector<std::string> strings = Vector(std::string("list"),
+ std::string("of"),
+ std::string("strings"));
+
+ base::ListValue* strings_value = new base::ListValue();
+ for (size_t i = 0; i < strings.size(); ++i)
+ strings_value->Append(new base::StringValue(strings[i]));
+
+ base::DictionaryValue value;
+ value.SetInteger("integers", 4);
+ value.Set("strings", strings_value);
+
+ ChoiceType out;
+ ASSERT_TRUE(ChoiceType::Populate(value, &out));
+ ASSERT_TRUE(out.integers.as_integer.get());
+ EXPECT_FALSE(out.integers.as_integers.get());
+ EXPECT_EQ(4, *out.integers.as_integer);
+
+ EXPECT_FALSE(out.strings->as_string.get());
+ ASSERT_TRUE(out.strings->as_strings.get());
+ EXPECT_EQ(strings, *out.strings->as_strings);
+}
+
+TEST(JsonSchemaCompilerChoicesTest, ChoiceTypeToValue) {
+ base::ListValue* strings_value = new base::ListValue();
+ strings_value->Append(new base::StringValue("list"));
+ strings_value->Append(new base::StringValue("of"));
+ strings_value->Append(new base::StringValue("strings"));
+
+ base::DictionaryValue value;
+ value.SetInteger("integers", 5);
+ value.Set("strings", strings_value);
+
+ ChoiceType out;
+ ASSERT_TRUE(ChoiceType::Populate(value, &out));
+
+ EXPECT_TRUE(value.Equals(out.ToValue().get()));
+}
+
+TEST(JsonSchemaCompilerChoicesTest, ReturnChoices) {
+ {
+ ReturnChoices::Results::Result results;
+ results.as_integers.reset(new std::vector<int>(Vector(1, 2)));
+
+ scoped_ptr<base::Value> results_value = results.ToValue();
+ ASSERT_TRUE(results_value);
+
+ base::ListValue expected;
+ expected.AppendInteger(1);
+ expected.AppendInteger(2);
+
+ EXPECT_TRUE(expected.Equals(results_value.get()));
+ }
+ {
+ ReturnChoices::Results::Result results;
+ results.as_integer.reset(new int(5));
+
+ scoped_ptr<base::Value> results_value = results.ToValue();
+ ASSERT_TRUE(results_value);
+
+ base::FundamentalValue expected(5);
+
+ EXPECT_TRUE(expected.Equals(results_value.get()));
+ }
+}
+
+TEST(JsonSchemaCompilerChoicesTest, NestedChoices) {
+ // These test both ToValue and FromValue for every legitimate configuration of
+  // NestedChoice.
+ {
+ // The plain integer choice.
+ scoped_ptr<base::Value> value = ReadJson("42");
+ scoped_ptr<NestedChoice> obj = NestedChoice::FromValue(*value);
+
+ ASSERT_TRUE(obj);
+ ASSERT_TRUE(obj->as_integer);
+ EXPECT_FALSE(obj->as_choice1);
+ EXPECT_FALSE(obj->as_choice2);
+ EXPECT_EQ(42, *obj->as_integer);
+
+ EXPECT_TRUE(base::Value::Equals(value.get(), obj->ToValue().get()));
+ }
+
+ {
+ // The string choice within the first choice.
+ scoped_ptr<base::Value> value = ReadJson("\"foo\"");
+ scoped_ptr<NestedChoice> obj = NestedChoice::FromValue(*value);
+
+ ASSERT_TRUE(obj);
+ EXPECT_FALSE(obj->as_integer);
+ ASSERT_TRUE(obj->as_choice1);
+ EXPECT_FALSE(obj->as_choice2);
+ ASSERT_TRUE(obj->as_choice1->as_string);
+ EXPECT_FALSE(obj->as_choice1->as_boolean);
+ EXPECT_EQ("foo", *obj->as_choice1->as_string);
+
+ EXPECT_TRUE(base::Value::Equals(value.get(), obj->ToValue().get()));
+ }
+
+ {
+ // The boolean choice within the first choice.
+ scoped_ptr<base::Value> value = ReadJson("true");
+ scoped_ptr<NestedChoice> obj = NestedChoice::FromValue(*value);
+
+ ASSERT_TRUE(obj);
+ EXPECT_FALSE(obj->as_integer);
+ ASSERT_TRUE(obj->as_choice1);
+ EXPECT_FALSE(obj->as_choice2);
+ EXPECT_FALSE(obj->as_choice1->as_string);
+ ASSERT_TRUE(obj->as_choice1->as_boolean);
+ EXPECT_TRUE(*obj->as_choice1->as_boolean);
+
+ EXPECT_TRUE(base::Value::Equals(value.get(), obj->ToValue().get()));
+ }
+
+ {
+ // The double choice within the second choice.
+ scoped_ptr<base::Value> value = ReadJson("42.0");
+ scoped_ptr<NestedChoice> obj = NestedChoice::FromValue(*value);
+
+ ASSERT_TRUE(obj);
+ EXPECT_FALSE(obj->as_integer);
+ EXPECT_FALSE(obj->as_choice1);
+ ASSERT_TRUE(obj->as_choice2);
+ ASSERT_TRUE(obj->as_choice2->as_double);
+ EXPECT_FALSE(obj->as_choice2->as_choice_type);
+ EXPECT_FALSE(obj->as_choice2->as_choice_types);
+ EXPECT_EQ(42.0, *obj->as_choice2->as_double);
+
+ EXPECT_TRUE(base::Value::Equals(value.get(), obj->ToValue().get()));
+ }
+
+ {
+ // The ChoiceType choice within the second choice.
+ scoped_ptr<base::Value> value = ReadJson(
+ "{\"integers\": [1, 2], \"strings\": \"foo\"}");
+ scoped_ptr<NestedChoice> obj = NestedChoice::FromValue(*value);
+
+ ASSERT_TRUE(obj);
+ EXPECT_FALSE(obj->as_integer);
+ EXPECT_FALSE(obj->as_choice1);
+ ASSERT_TRUE(obj->as_choice2);
+ EXPECT_FALSE(obj->as_choice2->as_double);
+ ASSERT_TRUE(obj->as_choice2->as_choice_type);
+ EXPECT_FALSE(obj->as_choice2->as_choice_types);
+ {
+ ChoiceType* choice_type = obj->as_choice2->as_choice_type.get();
+ ASSERT_TRUE(choice_type->integers.as_integers);
+ EXPECT_FALSE(choice_type->integers.as_integer);
+ EXPECT_EQ(Vector(1, 2), *choice_type->integers.as_integers);
+ ASSERT_TRUE(choice_type->strings);
+ EXPECT_FALSE(choice_type->strings->as_strings);
+ ASSERT_TRUE(choice_type->strings->as_string);
+ EXPECT_EQ("foo", *choice_type->strings->as_string);
+ }
+
+ EXPECT_TRUE(base::Value::Equals(value.get(), obj->ToValue().get()));
+ }
+
+ {
+ // The array of ChoiceTypes within the second choice.
+ scoped_ptr<base::Value> value = ReadJson(
+ "["
+ " {\"integers\": [1, 2], \"strings\": \"foo\"},"
+ " {\"integers\": 3, \"strings\": [\"bar\", \"baz\"]}"
+ "]");
+ scoped_ptr<NestedChoice> obj = NestedChoice::FromValue(*value);
+
+ ASSERT_TRUE(obj);
+ EXPECT_FALSE(obj->as_integer);
+ EXPECT_FALSE(obj->as_choice1);
+ ASSERT_TRUE(obj->as_choice2);
+ EXPECT_FALSE(obj->as_choice2->as_double);
+ EXPECT_FALSE(obj->as_choice2->as_choice_type);
+ ASSERT_TRUE(obj->as_choice2->as_choice_types);
+ {
+ std::vector<linked_ptr<ChoiceType> >* choice_types =
+ obj->as_choice2->as_choice_types.get();
+      // Checking the size is sufficient; element contents are covered above.
+ ASSERT_EQ(2u, choice_types->size());
+ }
+
+ EXPECT_TRUE(base::Value::Equals(value.get(), obj->ToValue().get()));
+ }
+}
+
+} // namespace
diff --git a/tools/json_schema_compiler/test/content_settings.json b/tools/json_schema_compiler/test/content_settings.json
new file mode 100644
index 0000000..6fac436
--- /dev/null
+++ b/tools/json_schema_compiler/test/content_settings.json
@@ -0,0 +1,222 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+[
+ {
+ "namespace": "contentSettings",
+ "description": "The contentSettings API.",
+ "types": [
+ {
+ "id": "ResourceIdentifier",
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "The resource identifier for the given content type."
+ },
+ "description": {
+ "type": "string",
+ "optional": true,
+ "description": "A human readable description of the resource."
+ }
+ },
+ "description": "The only content type using resource identifiers is <a href=\"contentSettings.html#property-plugins\"><var>plugins</var></a>. For more information, see <a href=\"contentSettings.html#resource-identifiers\">Resource Identifiers</a>."
+ },
+ {
+ "id": "ContentSetting",
+ "type": "object",
+ "functions": [
+ {
+ "name": "clear",
+ "type": "function",
+ "description": "Clear all content setting rules set by this extension.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "scope": {
+ "type": "string",
+ "enum": ["regular", "incognito_session_only"],
+ "optional": true,
+                    "description": "Where to clear the setting (default: regular). One of<br><var>regular</var>: setting for regular profile (which is inherited by the incognito profile if not overridden elsewhere),<br><var>incognito_session_only</var>: setting for incognito profile that can only be set during an incognito session and is deleted when the incognito session ends (overrides regular settings)."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "get",
+ "type": "function",
+ "description": "Gets the current content setting for a given pair of URLs.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "primaryUrl": {
+ "type": "string",
+ "description": "The primary URL for which the content setting should be retrieved. Note that the meaning of a primary URL depends on the content type."
+ },
+ "secondaryUrl": {
+ "type": "string",
+ "description": "The secondary URL for which the content setting should be retrieved. Defaults to the primary URL. Note that the meaning of a secondary URL depends on the content type, and not all content types use secondary URLs.",
+ "optional": true
+ },
+ "resourceIdentifier": {
+ "$ref": "ResourceIdentifier",
+ "optional": true,
+ "description": "A more specific identifier of the type of content for which the settings should be retrieved."
+ },
+ "incognito": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether to check the content settings for an incognito session. (default false)"
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "setting": {
+ "type": "any",
+ "description": "The content setting. See the description of the individual ContentSetting objects for the possible values."
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "set",
+ "type": "function",
+ "description": "Applies a new content setting rule.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "primaryPattern": {
+ "type": "string",
+ "description": "The pattern for the primary URL. For details on the format of a pattern, see <a href='contentSettings.html#patterns'>Content Setting Patterns</a>."
+ },
+ "secondaryPattern": {
+ "type": "string",
+ "description": "The pattern for the secondary URL. Defaults to matching all URLs. For details on the format of a pattern, see <a href='contentSettings.html#patterns'>Content Setting Patterns</a>.",
+ "optional": true
+ },
+ "resourceIdentifier": {
+ "$ref": "ResourceIdentifier",
+ "optional": true,
+ "description": "The resource identifier for the content type."
+ },
+ "setting": {
+ "type": "any",
+ "description": "The setting applied by this rule. See the description of the individual ContentSetting objects for the possible values."
+ },
+ "scope": {
+ "type": "string",
+ "enum": ["regular", "incognito_session_only"],
+ "optional": true,
+                    "description": "Where to set the setting (default: regular). One of<br><var>regular</var>: setting for regular profile (which is inherited by the incognito profile if not overridden elsewhere),<br><var>incognito_session_only</var>: setting for incognito profile that can only be set during an incognito session and is deleted when the incognito session ends (overrides regular settings)."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "getResourceIdentifiers",
+ "type": "function",
+            "description": "Returns the list of resource identifiers for this content type, or <var>undefined</var> if resource identifiers are not used.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "resourceIdentifiers",
+ "type": "array",
+ "description": "A list of resource identifiers for this content type, or <var>undefined</var> if this content type does not use resource identifiers.",
+ "optional": true,
+ "items": {
+ "$ref": "ResourceIdentifier"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "properties": {
+ "cookies": {
+ "$ref": "ContentSetting",
+ "description": "Whether to allow cookies and other local data to be set by websites. One of<br><var>allow</var>: Accept cookies,<br><var>block</var>: Block cookies,<br><var>session_only</var>: Accept cookies only for the current session. <br>Default is <var>allow</var>.<br>The primary URL is the URL representing the cookie origin. The secondary URL is the URL of the top-level frame.",
+ "value": [
+ "cookies",
+ {"type":"string", "enum": ["allow", "block", "session_only"]}
+ ]
+ },
+ "images": {
+ "$ref": "ContentSetting",
+ "description": "Whether to show images. One of<br><var>allow</var>: Show images,<br><var>block</var>: Don't show images. <br>Default is <var>allow</var>.<br>The primary URL is the main-frame URL. The secondary URL is the URL of the image.",
+ "value": [
+ "images",
+ {"type":"string", "enum": ["allow", "block"]}
+ ]
+ },
+ "javascript": {
+ "$ref": "ContentSetting",
+ "description": "Whether to run JavaScript. One of<br><var>allow</var>: Run JavaScript,<br><var>block</var>: Don't run JavaScript. <br>Default is <var>allow</var>.<br>The primary URL is the main-frame URL. The secondary URL is not used.",
+ "value": [
+ "javascript",
+ {"type":"string", "enum": ["allow", "block"]}
+ ]
+ },
+ "plugins": {
+ "$ref": "ContentSetting",
+ "description": "Whether to run plug-ins. One of<br><var>allow</var>: Run plug-ins automatically,<br><var>block</var>: Don't run plug-ins automatically. <br>Default is <var>allow</var>.<br>The primary URL is the main-frame URL. The secondary URL is not used.",
+ "value": [
+ "plugins",
+ {"type":"string", "enum": ["allow", "block"]}
+ ]
+ },
+ "popups": {
+ "$ref": "ContentSetting",
+ "description": "Whether to allow sites to show pop-ups. One of<br><var>allow</var>: Allow sites to show pop-ups,<br><var>block</var>: Don't allow sites to show pop-ups. <br>Default is <var>block</var>.<br>The primary URL is the main-frame URL. The secondary URL is not used.",
+ "value": [
+ "popups",
+ {"type":"string", "enum": ["allow", "block"]}
+ ]
+ },
+ "notifications": {
+ "$ref": "ContentSetting",
+ "description": "Whether to allow sites to show desktop notifications. One of<br><var>allow</var>: Allow sites to show desktop notifications,<br><var>block</var>: Don't allow sites to show desktop notifications,<br><var>ask</var>: Ask when a site wants to show desktop notifications. <br>Default is <var>ask</var>.<br>The primary URL is the main-frame URL. The secondary URL is not used.",
+ "value": [
+ "notifications",
+ {"type":"string", "enum": ["allow", "block", "ask"]}
+ ]
+ }
+ }
+ }
+]
diff --git a/tools/json_schema_compiler/test/crossref.json b/tools/json_schema_compiler/test/crossref.json
new file mode 100644
index 0000000..a1e994b
--- /dev/null
+++ b/tools/json_schema_compiler/test/crossref.json
@@ -0,0 +1,76 @@
+[
+ {
+ "namespace": "crossref",
+ "description": "The crossref API.",
+ "dependencies": ["simple_api"],
+ "types": [
+ {
+ "id": "CrossrefType",
+ "type": "object",
+ "properties": {
+ "testType": {
+ "$ref": "simple_api.TestType",
+ "optional": true
+ }
+ }
+ }
+ ],
+ "functions": [
+ {
+ "name": "testTypeOptionalParam",
+ "type": "function",
+ "description": "Takes TestType as a param.",
+ "parameters": [
+ {
+ "name": "testType",
+ "$ref": "simple_api.TestType",
+ "optional": true
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "getTestType",
+ "type": "function",
+ "description": "Return a TestType.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "result",
+ "$ref": "simple_api.TestType",
+ "description": "A TestType."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "testTypeInObject",
+ "type": "function",
+ "description": "Takes an optional object with a TestType and a bool.",
+ "parameters": [
+ {
+ "name": "paramObject",
+ "type": "object",
+ "properties": {
+ "testType": {"$ref": "simple_api.TestType", "optional": true},
+ "boolean": {"type": "boolean"}
+ }
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/crossref_unittest.cc b/tools/json_schema_compiler/test/crossref_unittest.cc
new file mode 100644
index 0000000..7d8879d
--- /dev/null
+++ b/tools/json_schema_compiler/test/crossref_unittest.cc
@@ -0,0 +1,121 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/simple_api.h"
+#include "tools/json_schema_compiler/test/crossref.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using namespace test::api::crossref;
+
+namespace {
+
+static scoped_ptr<base::DictionaryValue> CreateTestTypeDictionary() {
+ base::DictionaryValue* value(new base::DictionaryValue());
+ value->SetWithoutPathExpansion("number", new base::FundamentalValue(1.1));
+ value->SetWithoutPathExpansion("integer", new base::FundamentalValue(4));
+ value->SetWithoutPathExpansion("string", new base::StringValue("bling"));
+ value->SetWithoutPathExpansion("boolean", new base::FundamentalValue(true));
+ return scoped_ptr<base::DictionaryValue>(value);
+}
+
+} // namespace
+
+TEST(JsonSchemaCompilerCrossrefTest, CrossrefTypePopulate) {
+ scoped_ptr<CrossrefType> crossref_type(new CrossrefType());
+ scoped_ptr<base::DictionaryValue> value(new base::DictionaryValue());
+ value->Set("testType", CreateTestTypeDictionary().release());
+ EXPECT_TRUE(CrossrefType::Populate(*value, crossref_type.get()));
+ EXPECT_TRUE(crossref_type->test_type.get());
+ EXPECT_TRUE(CreateTestTypeDictionary()->Equals(
+ crossref_type->test_type->ToValue().get()));
+ EXPECT_TRUE(value->Equals(crossref_type->ToValue().get()));
+}
+
+TEST(JsonSchemaCompilerCrossrefTest, TestTypeOptionalParamCreate) {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(CreateTestTypeDictionary().release());
+ scoped_ptr<TestTypeOptionalParam::Params> params(
+ TestTypeOptionalParam::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_TRUE(params->test_type.get());
+ EXPECT_TRUE(
+ CreateTestTypeDictionary()->Equals(params->test_type->ToValue().get()));
+}
+
+TEST(JsonSchemaCompilerCrossrefTest, TestTypeOptionalParamFail) {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::DictionaryValue> test_type_value =
+ CreateTestTypeDictionary();
+ test_type_value->RemoveWithoutPathExpansion("number", NULL);
+ params_value->Append(test_type_value.release());
+ scoped_ptr<TestTypeOptionalParam::Params> params(
+ TestTypeOptionalParam::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+}
+
+TEST(JsonSchemaCompilerCrossrefTest, GetTestType) {
+ scoped_ptr<base::DictionaryValue> value = CreateTestTypeDictionary();
+ scoped_ptr<test::api::simple_api::TestType> test_type(
+ new test::api::simple_api::TestType());
+ EXPECT_TRUE(
+ test::api::simple_api::TestType::Populate(*value, test_type.get()));
+
+ scoped_ptr<base::ListValue> results =
+ GetTestType::Results::Create(*test_type);
+ base::DictionaryValue* result_dict = NULL;
+ results->GetDictionary(0, &result_dict);
+ EXPECT_TRUE(value->Equals(result_dict));
+}
+
+TEST(JsonSchemaCompilerCrossrefTest, TestTypeInObjectParamsCreate) {
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::DictionaryValue> param_object_value(
+ new base::DictionaryValue());
+ param_object_value->Set("testType", CreateTestTypeDictionary().release());
+ param_object_value->Set("boolean", new base::FundamentalValue(true));
+ params_value->Append(param_object_value.release());
+ scoped_ptr<TestTypeInObject::Params> params(
+ TestTypeInObject::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_TRUE(params->param_object.test_type.get());
+ EXPECT_TRUE(params->param_object.boolean);
+ EXPECT_TRUE(CreateTestTypeDictionary()->Equals(
+ params->param_object.test_type->ToValue().get()));
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::DictionaryValue> param_object_value(
+ new base::DictionaryValue());
+ param_object_value->Set("boolean", new base::FundamentalValue(true));
+ params_value->Append(param_object_value.release());
+ scoped_ptr<TestTypeInObject::Params> params(
+ TestTypeInObject::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_FALSE(params->param_object.test_type.get());
+ EXPECT_TRUE(params->param_object.boolean);
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::DictionaryValue> param_object_value(
+ new base::DictionaryValue());
+ param_object_value->Set("testType", new base::StringValue("invalid"));
+ param_object_value->Set("boolean", new base::FundamentalValue(true));
+ params_value->Append(param_object_value.release());
+ scoped_ptr<TestTypeInObject::Params> params(
+ TestTypeInObject::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<base::DictionaryValue> param_object_value(
+ new base::DictionaryValue());
+ param_object_value->Set("testType", CreateTestTypeDictionary().release());
+ params_value->Append(param_object_value.release());
+ scoped_ptr<TestTypeInObject::Params> params(
+ TestTypeInObject::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+}
diff --git a/tools/json_schema_compiler/test/dependency_tester.json b/tools/json_schema_compiler/test/dependency_tester.json
new file mode 100644
index 0000000..4845136
--- /dev/null
+++ b/tools/json_schema_compiler/test/dependency_tester.json
@@ -0,0 +1,33 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+[
+ {
+ "namespace": "dependencyTester",
+ "description": "An API for testing dependencies.",
+ "dependencies": [ "browserAction", "fontSettings" ],
+ "types": [],
+ "functions": [
+ {
+ "name": "setTitle",
+ "type": "function",
+        "description": "Takes an object whose properties reference types from the browserAction and fontSettings dependencies.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "color": {
+ "$ref": "browserAction.ColorArray"
+ },
+ "scriptCode": {
+ "$ref": "fontSettings.FakeStringType"
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/enums.json b/tools/json_schema_compiler/test/enums.json
new file mode 100644
index 0000000..c776313
--- /dev/null
+++ b/tools/json_schema_compiler/test/enums.json
@@ -0,0 +1,265 @@
+[
+ {
+ "namespace": "enums",
+ "description": "The enums API.",
+ "types": [
+ {
+ "id": "Enumeration",
+ "type": "string",
+ "enum": ["one", "two", "three"]
+ },
+ {
+ "id": "EnumType",
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["one", "two", "three"]
+ }
+ }
+ },
+ {
+ "id": "HasEnumeration",
+ "type": "object",
+ "properties": {
+ "enumeration": {
+ "$ref": "Enumeration"
+ },
+ "optional_enumeration": {
+ "$ref": "Enumeration",
+ "optional": true
+ }
+ }
+ },
+ {
+ "id": "InlineAndReferenceEnum",
+ "type": "object",
+ "properties": {
+ "inline_enum": {
+ "type": "string",
+ "enum": ["test1", "test2", "test3"]
+ },
+ "reference_enum": {
+ "$ref": "Enumeration"
+ }
+ }
+ },
+ {
+ "id": "OptionalEnumType",
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["one", "two", "three"],
+ "optional": true
+ }
+ }
+ }
+ ],
+ "functions": [
+ {
+ "name": "takesEnum",
+ "type": "function",
+ "description": "Takes an enum as its parameter.",
+ "parameters": [
+ {
+ "name": "state",
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "takesEnumArray",
+ "type": "function",
+ "description": "Takes an enum array as its parameter.",
+ "parameters": [
+ {
+ "name": "values",
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "takesEnumAsType",
+ "type": "function",
+ "description": "Takes an enum type as its parameter.",
+ "parameters": [
+ {
+ "name": "enumeration",
+ "$ref": "Enumeration"
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "takesEnumArrayAsType",
+ "type": "function",
+ "description": "Takes an enum type array as its parameter.",
+ "parameters": [
+ {
+ "name": "values",
+ "type": "array",
+ "items": {
+ "$ref": "Enumeration"
+ }
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "returnsEnum",
+ "type": "function",
+ "description": "Returns an enum through the callback",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "state",
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "returnsEnumAsType",
+ "type": "function",
+ "description": "Returns an enum through the callback",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "enumeration",
+ "$ref": "Enumeration"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "returnsTwoEnums",
+ "type": "function",
+ "description": "Returns two enums through the callback",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "firstState",
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ },
+ {
+ "name": "secondState",
+ "type": "string",
+ "enum": ["spam", "ham", "eggs"]
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "takesOptionalEnum",
+ "type": "function",
+ "description": "Takes an enum as its parameter.",
+ "parameters": [
+ {
+ "name": "state",
+ "type": "string",
+ "enum": ["foo", "bar", "baz"],
+ "optional": true
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "takesMultipleOptionalEnums",
+ "type": "function",
+ "description": "Takes two enums as parameters.",
+ "parameters": [
+ {
+ "name": "state",
+ "type": "string",
+ "enum": ["foo", "bar", "baz"],
+ "optional": true
+ },
+ {
+ "name": "type",
+ "type": "string",
+ "enum": ["foo", "ding", "dong"],
+ "optional": true
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "onEnumFired",
+ "type": "function",
+ "description": "Fired when an enum is ready.",
+ "parameters": [
+ {
+ "name": "someEnum",
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ ]
+ },
+ {
+ "name": "onTwoEnumsFired",
+ "type": "function",
+ "description": "Fired when two enums are ready.",
+ "parameters": [
+ {
+ "name": "firstEnum",
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ },
+ {
+ "name": "secondEnum",
+ "type": "string",
+ "enum": ["spam", "ham", "eggs"]
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/enums_unittest.cc b/tools/json_schema_compiler/test/enums_unittest.cc
new file mode 100644
index 0000000..7f1addb
--- /dev/null
+++ b/tools/json_schema_compiler/test/enums_unittest.cc
@@ -0,0 +1,278 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/enums.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/json_schema_compiler/test/test_util.h"
+
+using namespace test::api::enums;
+using json_schema_compiler::test_util::List;
+
+TEST(JsonSchemaCompilerEnumsTest, EnumTypePopulate) {
+ {
+ EnumType enum_type;
+ base::DictionaryValue value;
+ value.Set("type", new base::StringValue("one"));
+ EXPECT_TRUE(EnumType::Populate(value, &enum_type));
+ EXPECT_EQ(EnumType::TYPE_ONE, enum_type.type);
+ EXPECT_TRUE(value.Equals(enum_type.ToValue().get()));
+ }
+ {
+ EnumType enum_type;
+ base::DictionaryValue value;
+ value.Set("type", new base::StringValue("invalid"));
+ EXPECT_FALSE(EnumType::Populate(value, &enum_type));
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, EnumsAsTypes) {
+ {
+ base::ListValue args;
+ args.Append(new base::StringValue("one"));
+
+ scoped_ptr<TakesEnumAsType::Params> params(
+ TakesEnumAsType::Params::Create(args));
+ ASSERT_TRUE(params.get());
+ EXPECT_EQ(ENUMERATION_ONE, params->enumeration);
+
+ EXPECT_TRUE(args.Equals(ReturnsEnumAsType::Results::Create(
+ ENUMERATION_ONE).get()));
+ }
+ {
+ HasEnumeration enumeration;
+ EXPECT_EQ(ENUMERATION_NONE, enumeration.enumeration);
+ EXPECT_EQ(ENUMERATION_NONE, enumeration.optional_enumeration);
+ }
+ {
+ HasEnumeration enumeration;
+ base::DictionaryValue value;
+ ASSERT_FALSE(HasEnumeration::Populate(value, &enumeration));
+
+ value.Set("enumeration", new base::StringValue("one"));
+ ASSERT_TRUE(HasEnumeration::Populate(value, &enumeration));
+ EXPECT_TRUE(value.Equals(enumeration.ToValue().get()));
+
+ value.Set("optional_enumeration", new base::StringValue("two"));
+ ASSERT_TRUE(HasEnumeration::Populate(value, &enumeration));
+ EXPECT_TRUE(value.Equals(enumeration.ToValue().get()));
+ }
+ {
+ InlineAndReferenceEnum enumeration;
+ base::DictionaryValue value;
+ ASSERT_FALSE(InlineAndReferenceEnum::Populate(value, &enumeration));
+
+ value.Set("inline_enum", new base::StringValue("test2"));
+ ASSERT_FALSE(InlineAndReferenceEnum::Populate(value, &enumeration));
+
+ value.Set("reference_enum", new base::StringValue("one"));
+ ASSERT_TRUE(InlineAndReferenceEnum::Populate(value, &enumeration));
+ EXPECT_TRUE(value.Equals(enumeration.ToValue().get()));
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, EnumsArrayAsType) {
+ {
+ base::ListValue params_value;
+ params_value.Append(List(new base::StringValue("one"),
+ new base::StringValue("two")).release());
+ scoped_ptr<TakesEnumArrayAsType::Params> params(
+ TakesEnumArrayAsType::Params::Create(params_value));
+ ASSERT_TRUE(params);
+ EXPECT_EQ(2U, params->values.size());
+ EXPECT_EQ(ENUMERATION_ONE, params->values[0]);
+ EXPECT_EQ(ENUMERATION_TWO, params->values[1]);
+ }
+ {
+ base::ListValue params_value;
+ params_value.Append(List(new base::StringValue("invalid")).release());
+ scoped_ptr<TakesEnumArrayAsType::Params> params(
+ TakesEnumArrayAsType::Params::Create(params_value));
+ EXPECT_FALSE(params);
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, ReturnsEnumCreate) {
+ {
+ ReturnsEnum::Results::State state = ReturnsEnum::Results::STATE_FOO;
+ scoped_ptr<base::Value> result(
+ new base::StringValue(ReturnsEnum::Results::ToString(state)));
+ scoped_ptr<base::Value> expected(new base::StringValue("foo"));
+ EXPECT_TRUE(result->Equals(expected.get()));
+ }
+ {
+ ReturnsEnum::Results::State state = ReturnsEnum::Results::STATE_FOO;
+ scoped_ptr<base::ListValue> results = ReturnsEnum::Results::Create(state);
+ base::ListValue expected;
+ expected.Append(new base::StringValue("foo"));
+ EXPECT_TRUE(results->Equals(&expected));
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, ReturnsTwoEnumsCreate) {
+ {
+ scoped_ptr<base::ListValue> results = ReturnsTwoEnums::Results::Create(
+ ReturnsTwoEnums::Results::FIRST_STATE_FOO,
+ ReturnsTwoEnums::Results::SECOND_STATE_HAM);
+ base::ListValue expected;
+ expected.Append(new base::StringValue("foo"));
+ expected.Append(new base::StringValue("ham"));
+ EXPECT_TRUE(results->Equals(&expected));
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, OptionalEnumTypePopulate) {
+ {
+ OptionalEnumType enum_type;
+ base::DictionaryValue value;
+ value.Set("type", new base::StringValue("two"));
+ EXPECT_TRUE(OptionalEnumType::Populate(value, &enum_type));
+ EXPECT_EQ(OptionalEnumType::TYPE_TWO, enum_type.type);
+ EXPECT_TRUE(value.Equals(enum_type.ToValue().get()));
+ }
+ {
+ OptionalEnumType enum_type;
+ base::DictionaryValue value;
+ EXPECT_TRUE(OptionalEnumType::Populate(value, &enum_type));
+ EXPECT_EQ(OptionalEnumType::TYPE_NONE, enum_type.type);
+ EXPECT_TRUE(value.Equals(enum_type.ToValue().get()));
+ }
+ {
+ OptionalEnumType enum_type;
+ base::DictionaryValue value;
+ value.Set("type", new base::StringValue("invalid"));
+ EXPECT_FALSE(OptionalEnumType::Populate(value, &enum_type));
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, TakesEnumParamsCreate) {
+ {
+ base::ListValue params_value;
+ params_value.Append(new base::StringValue("baz"));
+ scoped_ptr<TakesEnum::Params> params(
+ TakesEnum::Params::Create(params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_EQ(TakesEnum::Params::STATE_BAZ, params->state);
+ }
+ {
+ base::ListValue params_value;
+ params_value.Append(new base::StringValue("invalid"));
+ scoped_ptr<TakesEnum::Params> params(
+ TakesEnum::Params::Create(params_value));
+ EXPECT_FALSE(params.get());
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, TakesEnumArrayParamsCreate) {
+ {
+ base::ListValue params_value;
+ params_value.Append(List(new base::StringValue("foo"),
+ new base::StringValue("bar")).release());
+ scoped_ptr<TakesEnumArray::Params> params(
+ TakesEnumArray::Params::Create(params_value));
+ ASSERT_TRUE(params);
+ EXPECT_EQ(2U, params->values.size());
+ EXPECT_EQ(TakesEnumArray::Params::VALUES_TYPE_FOO, params->values[0]);
+ EXPECT_EQ(TakesEnumArray::Params::VALUES_TYPE_BAR, params->values[1]);
+ }
+ {
+ base::ListValue params_value;
+ params_value.Append(List(new base::StringValue("invalid")).release());
+ scoped_ptr<TakesEnumArray::Params> params(
+ TakesEnumArray::Params::Create(params_value));
+ EXPECT_FALSE(params);
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, TakesOptionalEnumParamsCreate) {
+ {
+ base::ListValue params_value;
+ params_value.Append(new base::StringValue("baz"));
+ scoped_ptr<TakesOptionalEnum::Params> params(
+ TakesOptionalEnum::Params::Create(params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_EQ(TakesOptionalEnum::Params::STATE_BAZ, params->state);
+ }
+ {
+ base::ListValue params_value;
+ scoped_ptr<TakesOptionalEnum::Params> params(
+ TakesOptionalEnum::Params::Create(params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_EQ(TakesOptionalEnum::Params::STATE_NONE, params->state);
+ }
+ {
+ base::ListValue params_value;
+ params_value.Append(new base::StringValue("invalid"));
+ scoped_ptr<TakesOptionalEnum::Params> params(
+ TakesOptionalEnum::Params::Create(params_value));
+ EXPECT_FALSE(params.get());
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, TakesMultipleOptionalEnumsParamsCreate) {
+ {
+ base::ListValue params_value;
+ params_value.Append(new base::StringValue("foo"));
+ params_value.Append(new base::StringValue("foo"));
+ scoped_ptr<TakesMultipleOptionalEnums::Params> params(
+ TakesMultipleOptionalEnums::Params::Create(params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_EQ(TakesMultipleOptionalEnums::Params::STATE_FOO, params->state);
+ EXPECT_EQ(TakesMultipleOptionalEnums::Params::TYPE_FOO, params->type);
+ }
+ {
+ base::ListValue params_value;
+ params_value.Append(new base::StringValue("foo"));
+ scoped_ptr<TakesMultipleOptionalEnums::Params> params(
+ TakesMultipleOptionalEnums::Params::Create(params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_EQ(TakesMultipleOptionalEnums::Params::STATE_FOO, params->state);
+ EXPECT_EQ(TakesMultipleOptionalEnums::Params::TYPE_NONE, params->type);
+ }
+ {
+ base::ListValue params_value;
+ scoped_ptr<TakesMultipleOptionalEnums::Params> params(
+ TakesMultipleOptionalEnums::Params::Create(params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_EQ(TakesMultipleOptionalEnums::Params::STATE_NONE, params->state);
+ EXPECT_EQ(TakesMultipleOptionalEnums::Params::TYPE_NONE, params->type);
+ }
+ {
+ base::ListValue params_value;
+ params_value.Append(new base::StringValue("baz"));
+ params_value.Append(new base::StringValue("invalid"));
+ scoped_ptr<TakesMultipleOptionalEnums::Params> params(
+ TakesMultipleOptionalEnums::Params::Create(params_value));
+ EXPECT_FALSE(params.get());
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, OnEnumFiredCreate) {
+ {
+ OnEnumFired::SomeEnum some_enum = OnEnumFired::SOME_ENUM_FOO;
+ scoped_ptr<base::Value> result(
+ new base::StringValue(OnEnumFired::ToString(some_enum)));
+ scoped_ptr<base::Value> expected(new base::StringValue("foo"));
+ EXPECT_TRUE(result->Equals(expected.get()));
+ }
+ {
+ OnEnumFired::SomeEnum some_enum = OnEnumFired::SOME_ENUM_FOO;
+ scoped_ptr<base::ListValue> results(OnEnumFired::Create(some_enum));
+ base::ListValue expected;
+ expected.Append(new base::StringValue("foo"));
+ EXPECT_TRUE(results->Equals(&expected));
+ }
+}
+
+TEST(JsonSchemaCompilerEnumsTest, OnTwoEnumsFiredCreate) {
+ {
+ scoped_ptr<base::Value> results(OnTwoEnumsFired::Create(
+ OnTwoEnumsFired::FIRST_ENUM_FOO,
+ OnTwoEnumsFired::SECOND_ENUM_HAM));
+ base::ListValue expected;
+ expected.Append(new base::StringValue("foo"));
+ expected.Append(new base::StringValue("ham"));
+ EXPECT_TRUE(results->Equals(&expected));
+ }
+}
diff --git a/tools/json_schema_compiler/test/error_generation.json b/tools/json_schema_compiler/test/error_generation.json
new file mode 100644
index 0000000..f90ef6e
--- /dev/null
+++ b/tools/json_schema_compiler/test/error_generation.json
@@ -0,0 +1,152 @@
+[
+ {
+ "namespace": "error_generation",
+ "description": "Generates ALL the errors.",
+ "compiler_options": {
+ "generate_error_messages": true
+ },
+ "types": [
+ {
+ "id": "TestType",
+ "type": "object",
+ "properties": {
+ "string": {
+ "type": "string",
+ "description": "Some string."
+ }
+ }
+ },
+ {
+ "id": "OptionalTestType",
+ "type": "object",
+ "properties": {
+ "string": {
+ "type": "string",
+ "description": "Some string.",
+ "optional": true
+ }
+ }
+ },
+ {
+ "id": "ChoiceType",
+ "type": "object",
+ "properties": {
+ "integers": {
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ]
+ }
+ }
+ },
+ {
+ "id": "OptionalChoiceType",
+ "type": "object",
+ "properties": {
+ "integers": {
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ],
+ "optional": true
+ }
+ }
+ },
+ {
+ "id": "ObjectType",
+ "type": "object",
+ "properties": {
+ "otherType": {
+ "$ref": "error_generation.TestType",
+ "optional": true
+ }
+ }
+ },
+ {
+ "id": "Enumeration",
+ "type": "string",
+ "enum": ["one", "two", "three"]
+ },
+ {
+ "id": "HasEnumeration",
+ "type": "object",
+ "properties": {
+ "enumeration": {
+ "$ref": "Enumeration"
+ }
+ }
+ },
+ {
+ "id": "BinaryData",
+ "type": "object",
+ "properties": {
+ "data": {
+ "type" : "binary"
+ }
+ }
+ },
+ {
+ "id": "OptionalBinaryData",
+ "type": "object",
+ "properties": {
+ "data": {
+ "type" : "binary",
+ "optional": true
+ }
+ }
+ },
+ {
+ "id": "ArrayObject",
+ "type": "object",
+ "properties": {
+ "TheArray": {
+ "type": "array",
+ "items": {"type": "string"},
+ "optional": true,
+ "description": "Expecting a list?"
+ }
+ }
+ }
+ ],
+ "functions": [
+ {
+ "name": "testString",
+ "type": "function",
+ "description": "Takes a string. Or not.",
+ "parameters": [
+ {
+ "name": "str",
+ "type": "string",
+ "optional": true
+ }
+ ]
+ },
+ {
+ "name": "testFunction",
+ "type": "function",
+ "description": "Specifies a number of parameters.",
+ "parameters": [
+ {
+ "name": "num",
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "name": "testTypeInObject",
+ "type": "function",
+ "description": "Takes a TestType.",
+ "parameters": [
+ {
+ "name": "paramObject",
+ "type": "object",
+ "properties": {
+ "testType": {"$ref": "error_generation.TestType", "optional": true},
+ "boolean": {"type": "boolean"}
+ }
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/error_generation_unittest.cc b/tools/json_schema_compiler/test/error_generation_unittest.cc
new file mode 100644
index 0000000..42f9576
--- /dev/null
+++ b/tools/json_schema_compiler/test/error_generation_unittest.cc
@@ -0,0 +1,336 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/error_generation.h"
+
+#include "base/json/json_writer.h"
+#include "base/strings/utf_string_conversions.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/json_schema_compiler/test/test_util.h"
+
+using namespace test::api::error_generation;
+using base::FundamentalValue;
+using json_schema_compiler::test_util::Dictionary;
+using json_schema_compiler::test_util::List;
+
+template <typename T>
+base::string16 GetPopulateError(const base::Value& value) {
+ base::string16 error;
+ T test_type;
+ T::Populate(value, &test_type, &error);
+ return error;
+}
+
+testing::AssertionResult EqualsUtf16(const std::string& expected,
+ const base::string16& actual) {
+ if (base::ASCIIToUTF16(expected) != actual)
+ return testing::AssertionFailure() << expected << " != " << actual;
+ return testing::AssertionSuccess();
+}
+
+// GenerateTypePopulate errors
+
+TEST(JsonSchemaCompilerErrorTest, RequiredPropertyPopulate) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "string", new base::StringValue("bling"));
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<TestType>(*value)));
+ }
+ {
+ scoped_ptr<base::BinaryValue> value(new base::BinaryValue());
+ EXPECT_TRUE(EqualsUtf16("expected dictionary, got binary",
+ GetPopulateError<TestType>(*value)));
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, UnexpectedTypePopulation) {
+ {
+ scoped_ptr<base::ListValue> value(new base::ListValue());
+ EXPECT_TRUE(EqualsUtf16("",
+ GetPopulateError<ChoiceType::Integers>(*value)));
+ }
+ {
+ scoped_ptr<base::BinaryValue> value(new base::BinaryValue());
+ EXPECT_TRUE(EqualsUtf16("expected integers or integer, got binary",
+ GetPopulateError<ChoiceType::Integers>(*value)));
+ }
+}
+
+// GenerateTypePopulateProperty errors
+
+TEST(JsonSchemaCompilerErrorTest, TypeIsRequired) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "integers", new FundamentalValue(5));
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<ChoiceType>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value(new base::DictionaryValue());
+ EXPECT_TRUE(EqualsUtf16("'integers' is required",
+ GetPopulateError<ChoiceType>(*value)));
+ }
+}
+
+// GenerateParamsCheck errors
+
+TEST(JsonSchemaCompilerErrorTest, TooManyParameters) {
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ new FundamentalValue(5));
+ base::string16 error;
+ EXPECT_TRUE(TestFunction::Params::Create(*params_value, &error));
+ }
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ new FundamentalValue(5),
+ new FundamentalValue(5));
+ base::string16 error;
+ EXPECT_FALSE(TestFunction::Params::Create(*params_value, &error));
+ EXPECT_TRUE(EqualsUtf16("expected 1 arguments, got 2", error));
+ }
+}
+
+// GenerateFunctionParamsCreate errors
+
+TEST(JsonSchemaCompilerErrorTest, ParamIsRequired) {
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ new FundamentalValue(5));
+ base::string16 error;
+ EXPECT_TRUE(TestFunction::Params::Create(*params_value, &error));
+ }
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ base::Value::CreateNullValue());
+ base::string16 error;
+ EXPECT_FALSE(TestFunction::Params::Create(*params_value, &error));
+ EXPECT_TRUE(EqualsUtf16("'num' is required", error));
+ }
+}
+
+// GeneratePopulateVariableFromValue errors
+
+TEST(JsonSchemaCompilerErrorTest, WrongPropertyValueType) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "string", new base::StringValue("yes"));
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<TestType>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "string", new FundamentalValue(1.1));
+ EXPECT_TRUE(EqualsUtf16("'string': expected string, got number",
+ GetPopulateError<TestType>(*value)));
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, WrongParameterCreationType) {
+ {
+ base::string16 error;
+ scoped_ptr<base::ListValue> params_value = List(
+ new base::StringValue("Yeah!"));
+ EXPECT_TRUE(TestString::Params::Create(*params_value, &error));
+ }
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ new FundamentalValue(5));
+ base::string16 error;
+ EXPECT_FALSE(TestTypeInObject::Params::Create(*params_value, &error));
+ EXPECT_TRUE(EqualsUtf16("'paramObject': expected dictionary, got integer",
+ error));
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, WrongTypeValueType) {
+ {
+ scoped_ptr<base::DictionaryValue> value(new base::DictionaryValue());
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<ObjectType>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "otherType", new FundamentalValue(1.1));
+ ObjectType out;
+ base::string16 error;
+ EXPECT_TRUE(ObjectType::Populate(*value, &out, &error));
+ EXPECT_TRUE(EqualsUtf16("'otherType': expected dictionary, got number",
+ error));
+ EXPECT_EQ(NULL, out.other_type.get());
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, UnableToPopulateArray) {
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ new FundamentalValue(5));
+ EXPECT_TRUE(EqualsUtf16("",
+ GetPopulateError<ChoiceType::Integers>(*params_value)));
+ }
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ new FundamentalValue(5),
+ new FundamentalValue(false));
+ EXPECT_TRUE(EqualsUtf16("unable to populate array 'integers'",
+ GetPopulateError<ChoiceType::Integers>(*params_value)));
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, BinaryTypeExpected) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "data", new base::BinaryValue());
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<BinaryData>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "data", new FundamentalValue(1.1));
+ EXPECT_TRUE(EqualsUtf16("'data': expected binary, got number",
+ GetPopulateError<BinaryData>(*value)));
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, ListExpected) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "TheArray", new base::ListValue());
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<ArrayObject>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "TheArray", new FundamentalValue(5));
+ EXPECT_TRUE(EqualsUtf16("'TheArray': expected list, got integer",
+ GetPopulateError<ArrayObject>(*value)));
+ }
+}
+
+// GenerateStringToEnumConversion errors
+
+TEST(JsonSchemaCompilerErrorTest, BadEnumValue) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "enumeration", new base::StringValue("one"));
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<HasEnumeration>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "enumeration", new base::StringValue("bad sauce"));
+ EXPECT_TRUE(EqualsUtf16("'Enumeration': expected \"one\" or \"two\" "
+ "or \"three\", got \"bad sauce\"",
+ GetPopulateError<HasEnumeration>(*value)));
+ }
+}
+
+// Warn but don't fail out errors
+
+TEST(JsonSchemaCompilerErrorTest, WarnOnOptionalFailure) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "string", new base::StringValue("bling"));
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<OptionalTestType>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "string", new base::FundamentalValue(1));
+
+ OptionalTestType out;
+ base::string16 error;
+ EXPECT_TRUE(OptionalTestType::Populate(*value, &out, &error));
+ EXPECT_TRUE(EqualsUtf16("'string': expected string, got integer",
+ error));
+ EXPECT_EQ(NULL, out.string.get());
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, OptionalBinaryTypeFailure) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "data", new base::BinaryValue());
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<OptionalBinaryData>(*value)));
+ }
+ {
+ // There's a bug with silent failures if the key doesn't exist.
+ scoped_ptr<base::DictionaryValue> value = Dictionary("data",
+ new base::FundamentalValue(1));
+
+ OptionalBinaryData out;
+ base::string16 error;
+ EXPECT_TRUE(OptionalBinaryData::Populate(*value, &out, &error));
+ EXPECT_TRUE(EqualsUtf16("'data': expected binary, got integer",
+ error));
+ EXPECT_EQ(NULL, out.data.get());
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, OptionalArrayTypeFailure) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "TheArray", new base::ListValue());
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<ArrayObject>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "TheArray", new FundamentalValue(5));
+ ArrayObject out;
+ base::string16 error;
+ EXPECT_TRUE(ArrayObject::Populate(*value, &out, &error));
+ EXPECT_TRUE(EqualsUtf16("'TheArray': expected list, got integer",
+ error));
+ EXPECT_EQ(NULL, out.the_array.get());
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, OptionalUnableToPopulateArray) {
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ new FundamentalValue(5));
+ EXPECT_TRUE(EqualsUtf16("",
+ GetPopulateError<OptionalChoiceType::Integers>(*params_value)));
+ }
+ {
+ scoped_ptr<base::ListValue> params_value = List(
+ new FundamentalValue(5),
+ new FundamentalValue(false));
+ OptionalChoiceType::Integers out;
+ base::string16 error;
+ EXPECT_TRUE(OptionalChoiceType::Integers::Populate(*params_value, &out,
+ &error));
+ EXPECT_TRUE(EqualsUtf16("unable to populate array 'integers'",
+ error));
+ EXPECT_EQ(NULL, out.as_integer.get());
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, MultiplePopulationErrors) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "TheArray", new FundamentalValue(5));
+ ArrayObject out;
+ base::string16 error;
+ EXPECT_TRUE(ArrayObject::Populate(*value, &out, &error));
+ EXPECT_TRUE(EqualsUtf16("'TheArray': expected list, got integer",
+ error));
+ EXPECT_EQ(NULL, out.the_array.get());
+
+ EXPECT_TRUE(ArrayObject::Populate(*value, &out, &error));
+ EXPECT_TRUE(EqualsUtf16("'TheArray': expected list, got integer; "
+ "'TheArray': expected list, got integer",
+ error));
+ EXPECT_EQ(NULL, out.the_array.get());
+ }
+}
+
+TEST(JsonSchemaCompilerErrorTest, TooManyKeys) {
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "string", new base::StringValue("yes"));
+ EXPECT_TRUE(EqualsUtf16("", GetPopulateError<TestType>(*value)));
+ }
+ {
+ scoped_ptr<base::DictionaryValue> value = Dictionary(
+ "string", new base::StringValue("yes"),
+ "ohno", new base::StringValue("many values"));
+ EXPECT_TRUE(EqualsUtf16("found unexpected key 'ohno'",
+ GetPopulateError<TestType>(*value)));
+ }
+}
diff --git a/tools/json_schema_compiler/test/features_unittest.cc b/tools/json_schema_compiler/test/features_unittest.cc
new file mode 100644
index 0000000..c4f1f12
--- /dev/null
+++ b/tools/json_schema_compiler/test/features_unittest.cc
@@ -0,0 +1,22 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/json_schema_compiler/test/test_features.h"
+
+using test::features::TestFeatures;
+
+TEST(FeaturesGeneratorTest, FromString) {
+ TestFeatures test_features;
+ EXPECT_EQ(TestFeatures::kSimple, test_features.FromString("simple"));
+ EXPECT_EQ(TestFeatures::kComplex, test_features.FromString("complex"));
+}
+
+TEST(FeaturesGeneratorTest, ToString) {
+ TestFeatures test_features;
+ EXPECT_STREQ("simple", test_features.ToString(TestFeatures::kSimple));
+ EXPECT_STREQ("complex", test_features.ToString(TestFeatures::kComplex));
+}
diff --git a/tools/json_schema_compiler/test/font_settings.json b/tools/json_schema_compiler/test/font_settings.json
new file mode 100644
index 0000000..01c49f7
--- /dev/null
+++ b/tools/json_schema_compiler/test/font_settings.json
@@ -0,0 +1,551 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+[
+ {
+ "namespace": "fontSettings",
+ "description": "The fontSettings API.",
+ "types": [
+ {
+ "id": "FontName",
+ "type": "object",
+ "description": "Represents a font name.",
+ "properties": {
+ "fontName": {
+ "type": "string",
+ "description": "The font name."
+ },
+ "localizedName": {
+ "type": "string",
+ "description": "The font name localized for the current locale."
+ }
+ }
+ },
+ {
+ "id": "FakeStringType",
+ "type": "string",
+ "description": "Used to test a string type."
+ },
+ {
+ "id": "GenericFamily",
+ "type": "string",
+ "enum": ["standard", "sansserif", "serif", "fixed", "cursive", "fantasy"],
+ "description": "A CSS generic font family."
+ },
+ {
+ "id": "LevelOfControl",
+ "description": "One of<br><var>not_controllable</var>: cannot be controlled by any extension<br><var>controlled_by_other_extensions</var>: controlled by extensions with higher precedence<br><var>controllable_by_this_extension</var>: can be controlled by this extension<br><var>controlled_by_this_extension</var>: controlled by this extension",
+ "type": "string",
+ "enum": ["not_controllable", "controlled_by_other_extensions", "controllable_by_this_extension", "controlled_by_this_extension"]
+ }
+ ],
+ "functions": [
+ {
+ "name": "clearFont",
+ "description": "Clears the font set by this extension, if any.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "script": {
+ "$ref": "FakeStringType",
+ "description": "The script for which the font should be cleared. If omitted, the global script font setting is cleared.",
+ "optional": true
+ },
+ "genericFamily": {
+ "$ref": "GenericFamily",
+ "description": "The generic font family for which the font should be cleared."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "getFont",
+ "description": "Gets the font for a given script and generic font family.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "script": {
+ "$ref": "FakeStringType",
+ "description": "The script for which the font should be retrieved. If omitted, the font for the global script is retrieved.",
+ "optional": true
+ },
+ "genericFamily": {
+ "$ref": "GenericFamily",
+ "description": "The generic font family for which the font should be retrieved."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "fontName": {
+ "type": "string",
+ "description": "The font name. Rather than the literal font name preference value, this may be the name of the font that the system resolves the preference value to. The empty string signifies fallback to the global script font setting."
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "setFont",
+ "description": "Sets the font for a given script and generic font family.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "script": {
+ "$ref": "FakeStringType",
+                "description": "The script for which the font should be set. If omitted, the font for the global script is set.",
+ "optional": true
+ },
+ "genericFamily": {
+ "$ref": "GenericFamily",
+ "description": "The generic font family for which the font should be set."
+ },
+ "fontName": {
+ "type": "string",
+                "description": "The font name. If a script is specified, the empty string means to fall back to the global script font setting."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "getFontList",
+ "description": "Gets a list of fonts on the system.",
+ "parameters": [
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "results",
+ "type": "array",
+ "items": { "$ref": "FontName" }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "clearDefaultFontSize",
+ "description": "Clears the default font size set by this extension, if any.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "optional": true,
+ "description": "This parameter is currently unused.",
+ "properties": {}
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "getDefaultFontSize",
+ "description": "Gets the default font size.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "optional": true,
+ "description": "This parameter is currently unused.",
+ "properties": {}
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "setDefaultFontSize",
+ "description": "Sets the default font size.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "clearDefaultFixedFontSize",
+ "description": "Clears the default fixed font size set by this extension, if any.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "optional": true,
+ "description": "This parameter is currently unused.",
+ "properties": {}
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "getDefaultFixedFontSize",
+ "description": "Gets the default size for fixed width fonts.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "optional": true,
+ "description": "This parameter is currently unused.",
+ "properties": {}
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "setDefaultFixedFontSize",
+ "description": "Sets the default size for fixed width fonts.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "clearMinimumFontSize",
+ "description": "Clears the minimum font size set by this extension, if any.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "optional": true,
+ "description": "This parameter is currently unused.",
+ "properties": {}
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "getMinimumFontSize",
+ "description": "Gets the minimum font size.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "optional": true,
+ "description": "This parameter is currently unused.",
+ "properties": {}
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "setMinimumFontSize",
+ "description": "Sets the minimum font size.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "clearDefaultCharacterSet",
+ "description": "Clears the default character set set by this extension, if any.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "optional": true,
+ "description": "This parameter is currently unused.",
+ "properties": {}
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "getDefaultCharacterSet",
+ "description": "Gets the default character set.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "optional": true,
+ "description": "This parameter is currently unused.",
+ "properties": {}
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "charset": {
+ "type": "string",
+ "description": "The default character set, such as \"ISO-8859-1\"."
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "setDefaultCharacterSet",
+ "description": "Sets the default character set.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "properties": {
+ "charset": {
+ "type": "string",
+ "description": "The character set."
+ }
+ }
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "optional": true,
+ "parameters": []
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "onFontChanged",
+ "description": "Fired when a font setting changes.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "details",
+ "properties": {
+ "fontName": { "type": "string" },
+ "script": {
+ "$ref": "FakeStringType",
+ "description": "The script code for which the font setting has changed. If omitted, the global script font setting has changed.",
+ "optional": true
+ },
+ "genericFamily": {
+ "$ref": "GenericFamily",
+ "description": "The generic font family for which the font setting has changed."
+ },
+ "levelOfControl": {
+ "$ref": "LevelOfControl",
+ "description": "The level of control this extension has over the setting."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "onDefaultFontSizeChanged",
+ "description": "Fired when the default font size setting changes.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "details",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ },
+ "levelOfControl": {
+ "$ref": "LevelOfControl",
+ "description": "The level of control this extension has over the setting."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "onDefaultFixedFontSizeChanged",
+ "description": "Fired when the default fixed font size setting changes.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "details",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ },
+ "levelOfControl": {
+ "$ref": "LevelOfControl",
+ "description": "The level of control this extension has over the setting."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "onMinimumFontSizeChanged",
+ "description": "Fired when the minimum font size setting changes.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "details",
+ "properties": {
+ "pixelSize": {
+ "type": "integer",
+ "description": "The font size in pixels."
+ },
+ "levelOfControl": {
+ "$ref": "LevelOfControl",
+ "description": "The level of control this extension has over the setting."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "onDefaultCharacterSetChanged",
+ "description": "Fired when the default character set setting changes.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "details",
+ "properties": {
+ "charset": {
+ "type": "string",
+ "description": "The character set."
+ },
+ "levelOfControl": {
+ "$ref": "LevelOfControl",
+ "description": "The level of control this extension has over the setting."
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+]
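
For reference, a minimal sketch of how generated fontSettings bindings might be exercised, mirroring the Params::Create() pattern used by the other unittests in this patch; the header name, the test::api::font_settings namespace, and the GetFont scope are assumptions, since no unittest for this schema is added here.

// Illustrative sketch only; the names below are assumed, not part of this patch.
#include "base/values.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "tools/json_schema_compiler/test/font_settings.h"  // hypothetical generated header

using namespace test::api::font_settings;

TEST(JsonSchemaCompilerFontSettingsSketch, GetFontParamsCreate) {
  scoped_ptr<base::DictionaryValue> details(new base::DictionaryValue());
  details->SetString("genericFamily", "serif");  // valid GenericFamily enum value
  scoped_ptr<base::ListValue> params_value(new base::ListValue());
  params_value->Append(details.release());  // ListValue takes ownership.
  scoped_ptr<GetFont::Params> params(GetFont::Params::Create(*params_value));
  // A well-formed details object should parse successfully.
  EXPECT_TRUE(params.get());
}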
diff --git a/tools/json_schema_compiler/test/forbidden.json b/tools/json_schema_compiler/test/forbidden.json
new file mode 100644
index 0000000..841b0e1
--- /dev/null
+++ b/tools/json_schema_compiler/test/forbidden.json
@@ -0,0 +1,40 @@
+[
+ {
+ "namespace": "forbidden",
+ "description": "The forbidden API... Careful.",
+ "types": [],
+ "functions": [
+ {
+ "name": "forbiddenParameters",
+ "type": "function",
+        "description": "Don't do this at home. Accepts multiple choices and values.",
+ "parameters": [
+ {
+ "name": "firstChoice",
+ "description": "a choice between int and array",
+ "choices": [
+ {"type": "integer", "minimum": 0},
+ {"type": "array", "items": {"type": "integer"}}
+ ]
+ },
+ {
+ "type": "string",
+ "name": "firstString"
+ },
+ {
+ "name": "secondChoice",
+ "description": "a choice between int and array",
+ "choices": [
+ {"type": "integer", "minimum": 0},
+ {"type": "array", "items": {"type": "integer"}}
+ ]
+ },
+ {
+ "type": "string",
+ "name": "secondString"
+ }
+ ]
+ }
+ ]
+ }
+]
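
The forbidden schema mixes "choices" parameters with plain values; a hedged sketch of how its generated params might be built, assuming each choices parameter becomes a struct with as_integer-style members as in functions_on_types_unittest.cc and the error-generation tests (namespace and header name are assumptions):

// Illustrative sketch only; the names below are assumed, not part of this patch.
#include "base/values.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "tools/json_schema_compiler/test/forbidden.h"  // hypothetical generated header

using namespace test::api::forbidden;

TEST(JsonSchemaCompilerForbiddenSketch, ForbiddenParametersParamsCreate) {
  scoped_ptr<base::ListValue> params_value(new base::ListValue());
  params_value->Append(new base::FundamentalValue(3));    // firstChoice as integer
  params_value->Append(new base::StringValue("first"));   // firstString
  params_value->Append(new base::FundamentalValue(7));    // secondChoice as integer
  params_value->Append(new base::StringValue("second"));  // secondString
  scoped_ptr<ForbiddenParameters::Params> params(
      ForbiddenParameters::Params::Create(*params_value));
  EXPECT_TRUE(params.get());
}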
diff --git a/tools/json_schema_compiler/test/functions_as_parameters.json b/tools/json_schema_compiler/test/functions_as_parameters.json
new file mode 100644
index 0000000..b948616
--- /dev/null
+++ b/tools/json_schema_compiler/test/functions_as_parameters.json
@@ -0,0 +1,29 @@
+[
+ {
+ "namespace": "functions_as_parameters",
+    "description": "The functionsAsParameters API.",
+ "types": [
+ {
+ "id": "FunctionType",
+ "type": "object",
+ "properties": {
+ "event_callback": {
+ "type": "function",
+ "parameters": { }
+ }
+ }
+ },
+ {
+ "id": "OptionalFunctionType",
+ "type": "object",
+ "properties": {
+ "event_callback": {
+ "type": "function",
+ "optional": true,
+ "parameters": { }
+ }
+ }
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/functions_as_parameters_unittest.cc b/tools/json_schema_compiler/test/functions_as_parameters_unittest.cc
new file mode 100644
index 0000000..ce7a2f9
--- /dev/null
+++ b/tools/json_schema_compiler/test/functions_as_parameters_unittest.cc
@@ -0,0 +1,94 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/functions_as_parameters.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using namespace test::api::functions_as_parameters;
+
+TEST(JsonSchemaCompilerFunctionsAsParametersTest, PopulateRequiredFunction) {
+ // The expectation is that if any value is set for the function, then
+ // the function is "present".
+ {
+ base::DictionaryValue empty_value;
+ FunctionType out;
+ EXPECT_FALSE(FunctionType::Populate(empty_value, &out));
+ }
+ {
+ base::DictionaryValue value;
+ base::DictionaryValue function_dict;
+ value.Set("event_callback", function_dict.DeepCopy());
+ FunctionType out;
+ ASSERT_TRUE(FunctionType::Populate(value, &out));
+ EXPECT_TRUE(out.event_callback.empty());
+ }
+}
+
+TEST(JsonSchemaCompilerFunctionsAsParametersTest, RequiredFunctionToValue) {
+ {
+ base::DictionaryValue value;
+ base::DictionaryValue function_dict;
+ value.Set("event_callback", function_dict.DeepCopy());
+
+ FunctionType out;
+ ASSERT_TRUE(FunctionType::Populate(value, &out));
+ EXPECT_TRUE(value.Equals(out.ToValue().get()));
+ }
+ {
+ base::DictionaryValue value;
+ base::DictionaryValue expected_value;
+ base::DictionaryValue function_dict;
+ value.Set("event_callback", function_dict.DeepCopy());
+ expected_value.Set("event_callback", function_dict.DeepCopy());
+
+ FunctionType out;
+ ASSERT_TRUE(FunctionType::Populate(value, &out));
+ EXPECT_TRUE(expected_value.Equals(out.ToValue().get()));
+ }
+}
+
+TEST(JsonSchemaCompilerFunctionsAsParametersTest, PopulateOptionalFunction) {
+ {
+ base::DictionaryValue empty_value;
+ OptionalFunctionType out;
+ ASSERT_TRUE(OptionalFunctionType::Populate(empty_value, &out));
+ EXPECT_FALSE(out.event_callback.get());
+ }
+ {
+ base::DictionaryValue value;
+ base::DictionaryValue function_value;
+ value.Set("event_callback", function_value.DeepCopy());
+ OptionalFunctionType out;
+ ASSERT_TRUE(OptionalFunctionType::Populate(value, &out));
+ EXPECT_TRUE(out.event_callback.get());
+ }
+ {
+ base::DictionaryValue value;
+ base::DictionaryValue function_value;
+ value.Set("event_callback", function_value.DeepCopy());
+ OptionalFunctionType out;
+ ASSERT_TRUE(OptionalFunctionType::Populate(value, &out));
+ EXPECT_TRUE(out.event_callback.get());
+ }
+}
+
+TEST(JsonSchemaCompilerFunctionsAsParametersTest, OptionalFunctionToValue) {
+ {
+ base::DictionaryValue empty_value;
+ OptionalFunctionType out;
+ ASSERT_TRUE(OptionalFunctionType::Populate(empty_value, &out));
+ // event_callback should not be set in the return from ToValue.
+ EXPECT_TRUE(empty_value.Equals(out.ToValue().get()));
+ }
+ {
+ base::DictionaryValue value;
+ base::DictionaryValue function_value;
+ value.Set("event_callback", function_value.DeepCopy());
+
+ OptionalFunctionType out;
+ ASSERT_TRUE(OptionalFunctionType::Populate(value, &out));
+ EXPECT_TRUE(value.Equals(out.ToValue().get()));
+ }
+}
diff --git a/tools/json_schema_compiler/test/functions_on_types.json b/tools/json_schema_compiler/test/functions_on_types.json
new file mode 100644
index 0000000..a20a75f
--- /dev/null
+++ b/tools/json_schema_compiler/test/functions_on_types.json
@@ -0,0 +1,75 @@
+[
+ {
+ "namespace": "functionsOnTypes",
+ "description": "The functionsOnTypes API.",
+ "types": [
+ {
+ "id": "StorageArea",
+ "type": "object",
+ "functions": [
+ {
+ "name": "get",
+ "type": "function",
+ "description": "Gets one or more items from storage.",
+ "parameters": [
+ {
+ "name": "keys",
+ "choices": [
+ { "type": "string" },
+ {
+ "type": "object",
+ "description": "Storage items to return in the callback, where the values are replaced with those from storage if they exist.",
+ "properties": {},
+ "additionalProperties": { "type": "any" }
+ }
+ ],
+ "description": "A single key to get, list of keys to get, or a dictionary specifying default values (see description of the object). An empty list or object will return an empty result object. Pass in <code>null</code> to get the entire contents of storage.",
+ "optional": true
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "description": "Callback with storage items, or on failure (in which case lastError will be set).",
+ "parameters": [
+ {
+ "name": "items",
+ "type": "object",
+ "properties": {},
+ "additionalProperties": { "type": "any" },
+ "description": "Object with items in their key-value mappings."
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "id": "ChromeSetting",
+ "type": "object",
+ "description": "An interface which allows access to a Chrome browser setting.",
+ "functions": [
+ {
+ "name": "get",
+ "type": "function",
+ "description": "Gets the value of a setting.",
+ "parameters": [
+ {
+ "name": "details",
+ "type": "object",
+ "description": "What setting to consider.",
+ "properties": {
+ "incognito": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether to return the setting that applies to the incognito session (default false)."
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/functions_on_types_unittest.cc b/tools/json_schema_compiler/test/functions_on_types_unittest.cc
new file mode 100644
index 0000000..86ce492
--- /dev/null
+++ b/tools/json_schema_compiler/test/functions_on_types_unittest.cc
@@ -0,0 +1,71 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/values.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/json_schema_compiler/test/functions_on_types.h"
+
+using namespace test::api::functions_on_types;
+
+TEST(JsonSchemaCompilerFunctionsOnTypesTest, StorageAreaGetParamsCreate) {
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<StorageArea::Get::Params> params(
+ StorageArea::Get::Params::Create(*params_value));
+ ASSERT_TRUE(params);
+ EXPECT_FALSE(params->keys);
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(new base::FundamentalValue(9));
+ scoped_ptr<StorageArea::Get::Params> params(
+ StorageArea::Get::Params::Create(*params_value));
+ EXPECT_FALSE(params);
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(new base::StringValue("test"));
+ scoped_ptr<StorageArea::Get::Params> params(
+ StorageArea::Get::Params::Create(*params_value));
+ ASSERT_TRUE(params);
+ ASSERT_TRUE(params->keys);
+ EXPECT_EQ("test", *params->keys->as_string);
+ }
+ {
+ scoped_ptr<base::DictionaryValue> keys_object_value(
+ new base::DictionaryValue());
+ keys_object_value->SetInteger("integer", 5);
+ keys_object_value->SetString("string", "string");
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(keys_object_value->DeepCopy());
+ scoped_ptr<StorageArea::Get::Params> params(
+ StorageArea::Get::Params::Create(*params_value));
+ ASSERT_TRUE(params);
+ ASSERT_TRUE(params->keys);
+    EXPECT_TRUE(keys_object_value->Equals(
+        &params->keys->as_object->additional_properties));
+ }
+}
+
+TEST(JsonSchemaCompilerFunctionsOnTypesTest, StorageAreaGetResultCreate) {
+ StorageArea::Get::Results::Items items;
+ items.additional_properties.SetDouble("asdf", 0.1);
+ items.additional_properties.SetString("sdfg", "zxcv");
+ scoped_ptr<base::ListValue> results =
+ StorageArea::Get::Results::Create(items);
+ base::DictionaryValue* item_result = NULL;
+ ASSERT_TRUE(results->GetDictionary(0, &item_result));
+ EXPECT_TRUE(item_result->Equals(&items.additional_properties));
+}
+
+TEST(JsonSchemaCompilerFunctionsOnTypesTest, ChromeSettingGetParamsCreate) {
+ scoped_ptr<base::DictionaryValue> details_value(new base::DictionaryValue());
+ details_value->SetBoolean("incognito", true);
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(details_value.release());
+ scoped_ptr<ChromeSetting::Get::Params> params(
+ ChromeSetting::Get::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_TRUE(*params->details.incognito);
+}
diff --git a/tools/json_schema_compiler/test/idl_basics.idl b/tools/json_schema_compiler/test/idl_basics.idl
new file mode 100644
index 0000000..463574f
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_basics.idl
@@ -0,0 +1,106 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests a variety of basic API definition features.
+
+[internal] namespace idl_basics {
+ // Enum description
+ enum EnumType {
+ // comment1
+ name1,
+ name2
+ };
+
+ [nodoc] enum EnumTypeWithNoDoc {
+ name1,
+ name2
+ };
+
+ dictionary MyType1 {
+ // This comment tests "double-quotes".
+ [legalValues=(1,2)] long x;
+ DOMString y;
+ DOMString z;
+ DOMString a;
+ DOMString b;
+ DOMString c;
+ };
+
+ dictionary MyType2 {
+ DOMString x;
+ };
+
+ callback Callback1 = void();
+ callback Callback2 = void(long x);
+ callback Callback3 = void(MyType1 arg);
+ callback Callback4 = void(MyType2[] arg);
+ callback Callback5 = void(EnumType type);
+ // A comment on a callback.
+ // |x|: A parameter.
+ callback Callback6 = void(long x);
+ // |x|: Just a parameter comment, with no comment on the callback.
+ callback Callback7 = void(long x);
+
+ interface Functions {
+ static void function1();
+ static void function2(long x);
+ // This comment should appear in the documentation,
+ // despite occupying multiple lines.
+ //
+ // |arg|: So should this comment
+ // about the argument.
+ // <em>HTML</em> is fine too.
+ static void function3(MyType1 arg);
+
+ // This tests if "double-quotes" are escaped correctly.
+ //
+ // It also tests a comment with two newlines.
+ static void function4(Callback1 cb);
+ static void function5(Callback2 cb);
+ static void function6(Callback3 cb);
+
+ static void function7(optional long arg);
+ static void function8(long arg1, optional DOMString arg2);
+ static void function9(optional MyType1 arg);
+
+ static void function10(long x, long[] y);
+ static void function11(MyType1[] arg);
+
+ static void function12(Callback4 cb);
+
+ static void function13(EnumType type, Callback5 cb);
+ static void function14(EnumType[] types);
+
+ // "switch" is a reserved word and should cause a C++ compile error if we
+ // emit code for this declaration.
+ [nocompile] static void function15(long switch);
+
+ static void function16(Callback6 cb);
+ static void function17(Callback7 cb);
+ // |cb|: Override callback comment.
+ static void function18(Callback7 cb);
+
+ static void function20(idl_other_namespace.SomeType value);
+ static void function21(idl_other_namespace.SomeType[] values);
+ static void function22(
+ idl_other_namespace.sub_namespace.AnotherType value);
+ static void function23(
+ idl_other_namespace.sub_namespace.AnotherType[] values);
+
+ static long function24();
+ static MyType1 function25();
+ static MyType1[] function26();
+ static EnumType function27();
+ static EnumType[] function28();
+ static idl_other_namespace.SomeType function29();
+ static idl_other_namespace.SomeType[] function30();
+ };
+
+ interface Events {
+ static void onFoo1();
+ static void onFoo2(long x);
+ static void onFoo2(MyType1 arg);
+ static void onFoo3(EnumType type);
+ };
+};
diff --git a/tools/json_schema_compiler/test/idl_function_types.idl b/tools/json_schema_compiler/test/idl_function_types.idl
new file mode 100644
index 0000000..96bff43
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_function_types.idl
@@ -0,0 +1,18 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests function and callback types in various places in IDL.
+
+namespace idl_function_types {
+ callback MyCallback = void(long x);
+ callback MyUnionCallback = void ((long or DOMString) x);
+ callback MyOptionalUnionCallback = void (optional (long or DOMString) x);
+
+ interface Functions {
+ static void whatever(MyCallback[] callbacks);
+ static void blah(MyUnionCallback callback);
+ static void badabish(MyOptionalUnionCallback callback);
+ static void union_params((long or DOMString) x);
+ };
+};
diff --git a/tools/json_schema_compiler/test/idl_namespace_all_platforms.idl b/tools/json_schema_compiler/test/idl_namespace_all_platforms.idl
new file mode 100644
index 0000000..7ffd84a
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_namespace_all_platforms.idl
@@ -0,0 +1,10 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests the case of a namespace available on all platforms.
+
+[platforms=("chromeos", "chromeos_touch", "linux", "mac", "win")]
+namespace idl_namespace_all_platforms {
+
+};
diff --git a/tools/json_schema_compiler/test/idl_namespace_chromeos.idl b/tools/json_schema_compiler/test/idl_namespace_chromeos.idl
new file mode 100644
index 0000000..ad748ff
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_namespace_chromeos.idl
@@ -0,0 +1,10 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests the case of a namespace with chromeos platforms.
+
+[platforms=("chromeos")]
+namespace idl_namespace_chromeos {
+
+};
diff --git a/tools/json_schema_compiler/test/idl_namespace_non_specific_platforms.idl b/tools/json_schema_compiler/test/idl_namespace_non_specific_platforms.idl
new file mode 100644
index 0000000..cff64f8
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_namespace_non_specific_platforms.idl
@@ -0,0 +1,9 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests the case of a namespace with no specific platforms.
+
+namespace idl_namespace_non_specific_platforms {
+
+};
diff --git a/tools/json_schema_compiler/test/idl_namespace_specific_implement.idl b/tools/json_schema_compiler/test/idl_namespace_specific_implement.idl
new file mode 100644
index 0000000..99b65ac
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_namespace_specific_implement.idl
@@ -0,0 +1,10 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests the case of a namespace with a specific implemented_in file.
+
+[implemented_in="idl_namespace_specific_implement.idl"]
+namespace idl_namespace_specific_implement {
+
+};
diff --git a/tools/json_schema_compiler/test/idl_namespace_specific_implement_chromeos.idl b/tools/json_schema_compiler/test/idl_namespace_specific_implement_chromeos.idl
new file mode 100644
index 0000000..619e073
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_namespace_specific_implement_chromeos.idl
@@ -0,0 +1,11 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests the case of a namespace with a specific implemented_in file on chromeos.
+
+[platforms=("chromeos"),
+ implemented_in="idl_namespace_specific_implement_chromeos.idl"]
+namespace idl_namespace_specific_implement_chromeos {
+
+};
diff --git a/tools/json_schema_compiler/test/idl_object_types.idl b/tools/json_schema_compiler/test/idl_object_types.idl
new file mode 100644
index 0000000..ebf9f38
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_object_types.idl
@@ -0,0 +1,37 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests 'object' types in various places in IDL.
+
+namespace idl_object_types {
+ dictionary FooType {
+ long x;
+ object y;
+ [instanceOf=Window]object z;
+ };
+
+ dictionary BarType {
+ any x;
+ };
+
+ dictionary BazType {
+ long? x;
+ FooType? foo;
+ };
+
+ dictionary UnionType {
+ (long or FooType)? x;
+ (DOMString or object) y;
+ ([instanceOf=ImageData]object or long) z;
+ };
+
+ dictionary ModifiedUnionType {
+ [nodoc] (long or DOMString) x;
+ };
+
+ interface Functions {
+ static void objectFunction1([instanceOf=ImageData]object icon);
+ static void objectFunction2(any some_arg);
+ };
+};
diff --git a/tools/json_schema_compiler/test/idl_other_namespace.idl b/tools/json_schema_compiler/test/idl_other_namespace.idl
new file mode 100644
index 0000000..9aa3fa0
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_other_namespace.idl
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Provides types for testing scoped types included in idl_basics.idl.
+
+namespace idl_other_namespace {
+ enum SomeType {
+ hello,
+ world
+ };
+};
diff --git a/tools/json_schema_compiler/test/idl_other_namespace_sub_namespace.idl b/tools/json_schema_compiler/test/idl_other_namespace_sub_namespace.idl
new file mode 100644
index 0000000..0d6a2cc
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_other_namespace_sub_namespace.idl
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Provides types for testing scoped types included in idl_basics.idl.
+
+namespace idl_other_namespace.sub_namespace {
+ enum AnotherType {
+ hello,
+ world
+ };
+};
diff --git a/tools/json_schema_compiler/test/idl_reserved_words.idl b/tools/json_schema_compiler/test/idl_reserved_words.idl
new file mode 100644
index 0000000..411973e
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_reserved_words.idl
@@ -0,0 +1,27 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Tests 'reserved words' in various places in IDL.
+
+namespace reserved_words {
+
+ enum Foo { _float, _DOMString };
+
+ enum _enum {
+ _callback,
+ _namespace
+ };
+
+ dictionary _dictionary {
+ long _long;
+ };
+
+ dictionary MyType {
+ DOMString _interface;
+ };
+
+ interface Functions {
+ static void _static(Foo foo, _enum e);
+ };
+};
diff --git a/tools/json_schema_compiler/test/idl_schemas_unittest.cc b/tools/json_schema_compiler/test/idl_schemas_unittest.cc
new file mode 100644
index 0000000..1817afb
--- /dev/null
+++ b/tools/json_schema_compiler/test/idl_schemas_unittest.cc
@@ -0,0 +1,199 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/values.h"
+#include "tools/json_schema_compiler/test/idl_basics.h"
+#include "tools/json_schema_compiler/test/idl_object_types.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using test::api::idl_basics::MyType1;
+using test::api::idl_object_types::BarType;
+using test::api::idl_object_types::FooType;
+
+namespace Function2 = test::api::idl_basics::Function2;
+namespace Function3 = test::api::idl_basics::Function3;
+namespace Function4 = test::api::idl_basics::Function4;
+namespace Function5 = test::api::idl_basics::Function5;
+namespace Function6 = test::api::idl_basics::Function6;
+namespace Function7 = test::api::idl_basics::Function7;
+namespace Function8 = test::api::idl_basics::Function8;
+namespace Function9 = test::api::idl_basics::Function9;
+namespace Function10 = test::api::idl_basics::Function10;
+namespace Function11 = test::api::idl_basics::Function11;
+namespace ObjectFunction1 = test::api::idl_object_types::ObjectFunction1;
+
+TEST(IdlCompiler, Basics) {
+ // Test MyType1.
+ MyType1 a;
+ a.x = 5;
+ a.y = std::string("foo");
+ scoped_ptr<base::DictionaryValue> serialized = a.ToValue();
+ MyType1 b;
+ EXPECT_TRUE(MyType1::Populate(*serialized.get(), &b));
+ EXPECT_EQ(a.x, b.x);
+ EXPECT_EQ(a.y, b.y);
+
+ // Test Function2, which takes an integer parameter.
+ base::ListValue list;
+ list.Append(new base::FundamentalValue(5));
+ scoped_ptr<Function2::Params> f2_params = Function2::Params::Create(list);
+ EXPECT_EQ(5, f2_params->x);
+
+ // Test Function3, which takes a MyType1 parameter.
+ list.Clear();
+ base::DictionaryValue* tmp = new base::DictionaryValue();
+ tmp->SetInteger("x", 17);
+ tmp->SetString("y", "hello");
+ tmp->SetString("z", "zstring");
+ tmp->SetString("a", "astring");
+ tmp->SetString("b", "bstring");
+ tmp->SetString("c", "cstring");
+ list.Append(tmp);
+ scoped_ptr<Function3::Params> f3_params = Function3::Params::Create(list);
+ EXPECT_EQ(17, f3_params->arg.x);
+ EXPECT_EQ("hello", f3_params->arg.y);
+
+ // Test functions that take a callback function as a parameter, with varying
+ // callback signatures.
+ scoped_ptr<base::ListValue> f4_results = Function4::Results::Create();
+ base::ListValue expected;
+ EXPECT_TRUE(f4_results->Equals(&expected));
+
+ scoped_ptr<base::ListValue> f5_results(Function5::Results::Create(13));
+ base::Value* f5_result_int = NULL;
+ ASSERT_TRUE(f5_results->Get(0, &f5_result_int));
+ EXPECT_TRUE(f5_result_int->IsType(base::Value::TYPE_INTEGER));
+
+ scoped_ptr<base::ListValue> f6_results(Function6::Results::Create(a));
+ base::Value* f6_result_dict = NULL;
+ ASSERT_TRUE(f6_results->Get(0, &f6_result_dict));
+ MyType1 c;
+ EXPECT_TRUE(MyType1::Populate(*f6_result_dict, &c));
+ EXPECT_EQ(a.x, c.x);
+ EXPECT_EQ(a.y, c.y);
+}
+
+TEST(IdlCompiler, OptionalArguments) {
+ // Test a function that takes one optional argument, both without and with
+ // that argument.
+ base::ListValue list;
+ scoped_ptr<Function7::Params> f7_params = Function7::Params::Create(list);
+ EXPECT_EQ(NULL, f7_params->arg.get());
+ list.Append(new base::FundamentalValue(7));
+ f7_params = Function7::Params::Create(list);
+ EXPECT_EQ(7, *(f7_params->arg));
+
+ // Similar to above, but a function with one required and one optional
+ // argument.
+ list.Clear();
+ list.Append(new base::FundamentalValue(8));
+ scoped_ptr<Function8::Params> f8_params = Function8::Params::Create(list);
+ EXPECT_EQ(8, f8_params->arg1);
+ EXPECT_EQ(NULL, f8_params->arg2.get());
+ list.Append(new base::StringValue("foo"));
+ f8_params = Function8::Params::Create(list);
+ EXPECT_EQ(8, f8_params->arg1);
+ EXPECT_EQ("foo", *(f8_params->arg2));
+
+ // Test a function with an optional argument of custom type.
+ list.Clear();
+ scoped_ptr<Function9::Params> f9_params = Function9::Params::Create(list);
+ EXPECT_EQ(NULL, f9_params->arg.get());
+ list.Clear();
+ base::DictionaryValue* tmp = new base::DictionaryValue();
+ tmp->SetInteger("x", 17);
+ tmp->SetString("y", "hello");
+ tmp->SetString("z", "zstring");
+ tmp->SetString("a", "astring");
+ tmp->SetString("b", "bstring");
+ tmp->SetString("c", "cstring");
+ list.Append(tmp);
+ f9_params = Function9::Params::Create(list);
+ ASSERT_TRUE(f9_params->arg.get() != NULL);
+ MyType1* t1 = f9_params->arg.get();
+ EXPECT_EQ(17, t1->x);
+ EXPECT_EQ("hello", t1->y);
+}
+
+TEST(IdlCompiler, ArrayTypes) {
+ // Tests of a function that takes an integer and an array of integers. First
+ // use an empty array.
+ base::ListValue list;
+ list.Append(new base::FundamentalValue(33));
+ list.Append(new base::ListValue);
+ scoped_ptr<Function10::Params> f10_params = Function10::Params::Create(list);
+ ASSERT_TRUE(f10_params != NULL);
+ EXPECT_EQ(33, f10_params->x);
+ EXPECT_TRUE(f10_params->y.empty());
+
+ // Same function, but this time with 2 values in the array.
+ list.Clear();
+ list.Append(new base::FundamentalValue(33));
+ base::ListValue* sublist = new base::ListValue;
+ sublist->Append(new base::FundamentalValue(34));
+ sublist->Append(new base::FundamentalValue(35));
+ list.Append(sublist);
+ f10_params = Function10::Params::Create(list);
+ ASSERT_TRUE(f10_params != NULL);
+ EXPECT_EQ(33, f10_params->x);
+ ASSERT_EQ(2u, f10_params->y.size());
+ EXPECT_EQ(34, f10_params->y[0]);
+ EXPECT_EQ(35, f10_params->y[1]);
+
+ // Now test a function which takes an array of a defined type.
+ list.Clear();
+ MyType1 a;
+ MyType1 b;
+ a.x = 5;
+ b.x = 6;
+ a.y = std::string("foo");
+ b.y = std::string("bar");
+ base::ListValue* sublist2 = new base::ListValue;
+ sublist2->Append(a.ToValue().release());
+ sublist2->Append(b.ToValue().release());
+ list.Append(sublist2);
+ scoped_ptr<Function11::Params> f11_params = Function11::Params::Create(list);
+ ASSERT_TRUE(f11_params != NULL);
+ ASSERT_EQ(2u, f11_params->arg.size());
+ EXPECT_EQ(5, f11_params->arg[0]->x);
+ EXPECT_EQ("foo", f11_params->arg[0]->y);
+ EXPECT_EQ(6, f11_params->arg[1]->x);
+ EXPECT_EQ("bar", f11_params->arg[1]->y);
+}
+
+TEST(IdlCompiler, ObjectTypes) {
+ // Test the FooType type.
+ FooType f1;
+ f1.x = 3;
+ scoped_ptr<base::DictionaryValue> serialized_foo = f1.ToValue();
+ FooType f2;
+ EXPECT_TRUE(FooType::Populate(*serialized_foo.get(), &f2));
+ EXPECT_EQ(f1.x, f2.x);
+
+ // Test the BarType type.
+ BarType b1;
+ b1.x.reset(new base::FundamentalValue(7));
+ scoped_ptr<base::DictionaryValue> serialized_bar = b1.ToValue();
+ BarType b2;
+ EXPECT_TRUE(BarType::Populate(*serialized_bar.get(), &b2));
+ int tmp_int = 0;
+ EXPECT_TRUE(b2.x->GetAsInteger(&tmp_int));
+ EXPECT_EQ(7, tmp_int);
+
+ // Test the params to the ObjectFunction1 function.
+ scoped_ptr<base::DictionaryValue> icon_props(new base::DictionaryValue());
+ icon_props->SetString("hello", "world");
+ ObjectFunction1::Params::Icon icon;
+ EXPECT_TRUE(ObjectFunction1::Params::Icon::Populate(*(icon_props.get()),
+ &icon));
+ base::ListValue list;
+ list.Append(icon_props.release());
+ scoped_ptr<ObjectFunction1::Params> params =
+ ObjectFunction1::Params::Create(list);
+ ASSERT_TRUE(params.get() != NULL);
+ std::string tmp;
+ EXPECT_TRUE(params->icon.additional_properties.GetString("hello", &tmp));
+ EXPECT_EQ("world", tmp);
+}
diff --git a/tools/json_schema_compiler/test/json_schema_compiler_tests.gyp b/tools/json_schema_compiler/test/json_schema_compiler_tests.gyp
new file mode 100644
index 0000000..7921f87
--- /dev/null
+++ b/tools/json_schema_compiler/test/json_schema_compiler_tests.gyp
@@ -0,0 +1,46 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'json_schema_compiler_tests',
+ 'type': 'static_library',
+ 'variables': {
+ 'chromium_code': 1,
+ 'schema_files': [
+ 'additional_properties.json',
+ 'any.json',
+ 'arrays.json',
+ 'callbacks.json',
+ 'choices.json',
+ 'crossref.json',
+ 'enums.json',
+ 'functions_as_parameters.json',
+ 'functions_on_types.json',
+ 'idl_basics.idl',
+ 'idl_other_namespace.idl',
+ 'idl_other_namespace_sub_namespace.idl',
+ 'idl_object_types.idl',
+ 'objects.json',
+ 'simple_api.json',
+ 'error_generation.json'
+ ],
+ 'cc_dir': 'tools/json_schema_compiler/test',
+ 'root_namespace': 'test::api::%(namespace)s',
+ },
+ 'inputs': [
+ '<@(schema_files)',
+ ],
+ 'sources': [
+ '<@(schema_files)',
+ 'test_util.cc',
+ 'test_util.h',
+ ],
+ 'includes': ['../../../build/json_schema_compile.gypi'],
+ # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
+ 'msvs_disabled_warnings': [4267, ],
+ },
+ ],
+}
diff --git a/tools/json_schema_compiler/test/json_schema_test.json b/tools/json_schema_compiler/test/json_schema_test.json
new file mode 100644
index 0000000..b3b24e9
--- /dev/null
+++ b/tools/json_schema_compiler/test/json_schema_test.json
@@ -0,0 +1,106 @@
+[
+ {
+ "namespace": "compile",
+ "description": "The compile API.",
+ "functions": [],
+ "types": {}
+ },
+
+ {
+ "namespace": "nocompile",
+ "description": "The nocompile API.",
+ "nocompile": true,
+ "functions": [],
+ "types": {}
+ },
+
+ {
+ "namespace": "functions",
+ "description": "The functions API.",
+ "functions": [
+ {
+ "id": "one",
+ "nocompile": true
+ },
+ {
+ "id": "two"
+ },
+ {
+ "id": "three",
+ "nocompile": true
+ },
+ {
+ "id": "four"
+ }
+ ],
+
+ "types": {
+ "one": { "key": "value" }
+ }
+ },
+
+ {
+ "namespace": "types",
+ "description": "The types API.",
+ "functions": [
+ { "id": "one" }
+ ],
+ "types": {
+ "one": {
+ "key": "value",
+ "nocompile": true
+ },
+ "two": {
+ "key": "value"
+ },
+ "three": {
+ "key": "value",
+ "nocompile": true
+ },
+ "four": {
+ "key": "value"
+ }
+ }
+ },
+
+ {
+ "namespace": "nested",
+ "description": "The nested API.",
+ "properties": {
+ "sync": {
+ "functions": [
+ {
+ "id": "one",
+ "nocompile": true
+ },
+ {
+ "id": "two"
+ },
+ {
+ "id": "three",
+ "nocompile": true
+ },
+ {
+ "id": "four"
+ }
+ ],
+ "types": {
+ "one": {
+ "key": "value",
+ "nocompile": true
+ },
+ "two": {
+ "key": "value"
+ },
+ "three": {
+ "key": "value",
+ "nocompile": true
+ },
+ "four": {
+ "key": "value"
+ }
+ }
+ }
+ }
+ }
+]
diff --git a/tools/json_schema_compiler/test/objects.json b/tools/json_schema_compiler/test/objects.json
new file mode 100644
index 0000000..e76e229
--- /dev/null
+++ b/tools/json_schema_compiler/test/objects.json
@@ -0,0 +1,140 @@
+[
+ {
+ "namespace": "objects",
+ "description": "The objects API.",
+ "types": [],
+ "functions": [
+ {
+ "name": "objectParam",
+ "type": "function",
+ "description": "Takes an object.",
+ "parameters": [
+ {
+ "name": "info",
+ "type": "object",
+ "properties": {
+ "strings": {
+ "type": "array",
+ "items": {"type": "string"}
+ },
+ "integer": {
+ "type": "integer"
+ },
+ "boolean": {
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "returnsObject",
+ "description": "Returns an object.",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "info",
+ "type": "object",
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "returnsTwoObjects",
+ "description": "Return two objects.",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "firstInfo",
+ "type": "object",
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ }
+ },
+ {
+ "name": "secondInfo",
+ "type": "object",
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["spam", "ham", "eggs"]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "onObjectFired",
+ "type": "function",
+ "description": "Fired when an object is ready.",
+ "parameters": [
+ {
+ "name": "someObject",
+ "type": "object",
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "onTwoObjectsFired",
+ "type": "function",
+ "description": "Fired when two objects are ready.",
+ "parameters": [
+ {
+ "name": "firstObject",
+ "type": "object",
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["foo", "bar", "baz"]
+ }
+ }
+ },
+ {
+ "name": "secondObject",
+ "type": "object",
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["spam", "ham", "eggs"]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+]
+
diff --git a/tools/json_schema_compiler/test/objects_unittest.cc b/tools/json_schema_compiler/test/objects_unittest.cc
new file mode 100644
index 0000000..6dc2c45
--- /dev/null
+++ b/tools/json_schema_compiler/test/objects_unittest.cc
@@ -0,0 +1,71 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/objects.h"
+
+#include "base/json/json_writer.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using namespace test::api::objects;
+
+TEST(JsonSchemaCompilerObjectsTest, ObjectParamParamsCreate) {
+ {
+ scoped_ptr<base::ListValue> strings(new base::ListValue());
+ strings->Append(new base::StringValue("one"));
+ strings->Append(new base::StringValue("two"));
+ scoped_ptr<base::DictionaryValue> info_value(new base::DictionaryValue());
+ info_value->Set("strings", strings.release());
+ info_value->Set("integer", new base::FundamentalValue(5));
+ info_value->Set("boolean", new base::FundamentalValue(true));
+
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(info_value.release());
+ scoped_ptr<ObjectParam::Params> params(
+ ObjectParam::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_EQ((size_t) 2, params->info.strings.size());
+ EXPECT_EQ("one", params->info.strings[0]);
+ EXPECT_EQ("two", params->info.strings[1]);
+ EXPECT_EQ(5, params->info.integer);
+ EXPECT_TRUE(params->info.boolean);
+ }
+ {
+ scoped_ptr<base::ListValue> strings(new base::ListValue());
+ strings->Append(new base::StringValue("one"));
+ strings->Append(new base::StringValue("two"));
+ scoped_ptr<base::DictionaryValue> info_value(new base::DictionaryValue());
+ info_value->Set("strings", strings.release());
+ info_value->Set("integer", new base::FundamentalValue(5));
+
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(info_value.release());
+ scoped_ptr<ObjectParam::Params> params(
+ ObjectParam::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+}
+
+TEST(JsonSchemaCompilerObjectsTest, ReturnsObjectResultCreate) {
+ ReturnsObject::Results::Info info;
+ info.state = ReturnsObject::Results::Info::STATE_FOO;
+ scoped_ptr<base::ListValue> results = ReturnsObject::Results::Create(info);
+
+ base::DictionaryValue expected;
+ expected.SetString("state", "foo");
+ base::DictionaryValue* result = NULL;
+ ASSERT_TRUE(results->GetDictionary(0, &result));
+ ASSERT_TRUE(result->Equals(&expected));
+}
+
+TEST(JsonSchemaCompilerObjectsTest, OnObjectFiredCreate) {
+ OnObjectFired::SomeObject object;
+ object.state = OnObjectFired::SomeObject::STATE_BAR;
+ scoped_ptr<base::ListValue> results(OnObjectFired::Create(object));
+
+ base::DictionaryValue expected;
+ expected.SetString("state", "bar");
+ base::DictionaryValue* result = NULL;
+ ASSERT_TRUE(results->GetDictionary(0, &result));
+ ASSERT_TRUE(result->Equals(&expected));
+}
diff --git a/tools/json_schema_compiler/test/permissions.json b/tools/json_schema_compiler/test/permissions.json
new file mode 100644
index 0000000..df97441
--- /dev/null
+++ b/tools/json_schema_compiler/test/permissions.json
@@ -0,0 +1,140 @@
+[
+ {
+ "namespace": "permissions",
+ "description": "A test API for the json_schema_compiler.",
+ "types": [
+ {
+ "id": "Permissions",
+ "type": "object",
+ "properties": {
+ "permissions": {
+ "type": "array",
+ "items": {"type": "string"},
+ "optional": true,
+ "description": "List of named permissions (does not include hosts or origins)."
+ },
+ "origins": {
+ "type": "array",
+ "items": {"type": "string"},
+ "optional": true,
+ "description": "List of origin permissions."
+ }
+ }
+ }
+ ],
+ "events": [
+ {
+ "name": "onAdded",
+ "type": "function",
+ "description": "Fired when the extension acquires new permissions.",
+ "parameters": [
+ {
+ "$ref": "Permissions",
+ "name": "permissions",
+ "description": "The newly acquired permissions."
+ }
+ ]
+ },
+ {
+ "name": "onRemoved",
+ "type": "function",
+ "description": "Fired when access to permissions has been removed from the extension.",
+ "parameters": [
+ {
+ "$ref": "Permissions",
+ "name": "permissions",
+ "description": "The permissions that have been removed."
+ }
+ ]
+ }
+ ],
+ "functions": [
+ {
+ "name": "getAll",
+ "type": "function",
+ "description": "Gets the extension's current set of permissions.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "permissions",
+ "$ref": "Permissions",
+ "description": "The extension's active permissions."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "contains",
+ "type": "function",
+ "description": "Checks if the extension has the specified permissions.",
+ "parameters": [
+ {
+ "name": "permissions",
+ "$ref": "Permissions"
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "result",
+ "type": "boolean",
+ "description": "True if the extension has the specified permissions."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "request",
+ "type": "function",
+ "description": "Requests access to the specified permissions. These permissions must be defined in the optional_permissions field of the manifest. If there are any problems requesting the permissions, <a href='extension.html#property-lastError'>chrome.runtime.lastError</a> will be set.",
+ "parameters": [
+ {
+ "name": "permissions",
+ "$ref": "Permissions"
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "granted",
+ "type": "boolean",
+ "description": "True if the user granted the specified permissions."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "remove",
+ "type": "function",
+ "description": "Removes access to the specified permissions. If there are any problems removing the permissions, <a href='extension.html#property-lastError'>chrome.runtime.lastError</a> will be set.",
+ "parameters": [
+ {
+ "name": "permissions",
+ "$ref": "Permissions"
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "removed",
+ "type": "boolean",
+ "description": "True if the permissions were removed."
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+]
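
permissions.json declares only types, events, and functions with callback results, so a hedged sketch of a Populate()/ToValue() round trip on the generated Permissions type, following the pattern of the other unittests in this patch; the header name, namespace, and the shape of the optional "origins" field are assumptions:

// Illustrative sketch only; the names below are assumed, not part of this patch.
#include "base/values.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "tools/json_schema_compiler/test/permissions.h"  // hypothetical generated header

using test::api::permissions::Permissions;

TEST(JsonSchemaCompilerPermissionsSketch, PermissionsRoundTrip) {
  base::DictionaryValue value;
  base::ListValue* origins = new base::ListValue();
  origins->Append(new base::StringValue("http://example.com/*"));
  value.Set("origins", origins);  // DictionaryValue takes ownership.

  Permissions permissions;
  ASSERT_TRUE(Permissions::Populate(value, &permissions));
  // Serializing back should reproduce the input dictionary.
  EXPECT_TRUE(value.Equals(permissions.ToValue().get()));
}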
diff --git a/tools/json_schema_compiler/test/simple_api.json b/tools/json_schema_compiler/test/simple_api.json
new file mode 100644
index 0000000..9ab5403
--- /dev/null
+++ b/tools/json_schema_compiler/test/simple_api.json
@@ -0,0 +1,163 @@
+[
+ {
+ "namespace": "simple_api",
+ "description": "This is a simple API.",
+ "types": [
+ {
+ "id": "TestType",
+ "type": "object",
+ "properties": {
+ "string": {
+ "type": "string",
+ "description": "Some string."
+ },
+ "boolean": {
+ "type": "boolean",
+ "description": "Some boolean."
+ },
+ "number": {
+ "type": "number",
+ "description": "Some double."
+ },
+ "integer": {
+ "type": "integer",
+ "description": "Some integer."
+ }
+ }
+ }
+ ],
+ "functions": [
+ {
+ "name": "incrementInteger",
+ "type": "function",
+ "description": "Increments the given integer.",
+ "parameters": [
+ {
+ "name": "num",
+ "type": "integer"
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "result",
+ "type": "integer",
+ "description": "The incremented value."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "optionalString",
+ "type": "function",
+ "description": "Takes a string. Or not.",
+ "parameters": [
+ {
+ "name": "str",
+ "type": "string",
+ "optional": true
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "optionalBeforeRequired",
+ "type": "function",
+ "description": "Takes an optional parameter followed by a required one.",
+ "parameters": [
+ {
+ "name": "first",
+ "type": "string",
+ "optional": true
+ },
+ {
+ "name": "second",
+ "type": "string"
+ },
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "optionalCallbackParams",
+ "type": "function",
+      "description": "Gives back a TestType. Or not.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "result",
+ "$ref": "TestType",
+              "description": "The TestType given back, if any."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "getTestType",
+ "type": "function",
+      "description": "Returns a TestType.",
+ "parameters": [
+ {
+ "name": "callback",
+ "type": "function",
+ "parameters": [
+ {
+ "name": "result",
+ "$ref": "TestType",
+ "description": "A TestType."
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "onIntegerFired",
+ "type": "function",
+ "description": "Fired when an integer is ready.",
+ "parameters": [
+ {
+ "name": "someInteger",
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "name": "onStringFired",
+ "type": "function",
+ "description": "Fired when a string is ready.",
+ "parameters": [
+ {
+ "name": "someString",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "onTestTypeFired",
+ "type": "function",
+ "description": "Fired when a TestType is ready.",
+ "parameters": [
+ {
+ "name": "someTestType",
+ "$ref": "TestType"
+ }
+ ]
+ }
+ ]
+ }
+]
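For orientation, the unit tests in the next file exercise the C++ that the schema compiler generates from simple_api.json. A rough sketch of the generated TestType, inferred from those tests rather than copied from the generator's output (exact signatures may differ):

  // Sketch only: approximate shape of test::api::simple_api::TestType.
  struct TestType {
    // Parses |value| into |out|; returns false when a required field
    // (e.g. "number") is missing or has the wrong type.
    static bool Populate(const base::Value& value, TestType* out);

    // Serializes the struct back into an equivalent DictionaryValue.
    scoped_ptr<base::DictionaryValue> ToValue() const;

    std::string string;
    bool boolean;
    double number;
    int integer;
  };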
diff --git a/tools/json_schema_compiler/test/simple_api_unittest.cc b/tools/json_schema_compiler/test/simple_api_unittest.cc
new file mode 100644
index 0000000..f100b81
--- /dev/null
+++ b/tools/json_schema_compiler/test/simple_api_unittest.cc
@@ -0,0 +1,182 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/simple_api.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using namespace test::api::simple_api;
+
+namespace {
+
+static scoped_ptr<base::DictionaryValue> CreateTestTypeDictionary() {
+ scoped_ptr<base::DictionaryValue> value(new base::DictionaryValue());
+ value->SetWithoutPathExpansion("number", new base::FundamentalValue(1.1));
+ value->SetWithoutPathExpansion("integer", new base::FundamentalValue(4));
+ value->SetWithoutPathExpansion("string", new base::StringValue("bling"));
+ value->SetWithoutPathExpansion("boolean", new base::FundamentalValue(true));
+ return value.Pass();
+}
+
+} // namespace
+
+TEST(JsonSchemaCompilerSimpleTest, IncrementIntegerResultCreate) {
+ scoped_ptr<base::ListValue> results = IncrementInteger::Results::Create(5);
+ base::ListValue expected;
+ expected.Append(new base::FundamentalValue(5));
+ EXPECT_TRUE(results->Equals(&expected));
+}
+
+TEST(JsonSchemaCompilerSimpleTest, IncrementIntegerParamsCreate) {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(new base::FundamentalValue(6));
+ scoped_ptr<IncrementInteger::Params> params(
+ IncrementInteger::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_EQ(6, params->num);
+}
+
+TEST(JsonSchemaCompilerSimpleTest, NumberOfParams) {
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(new base::StringValue("text"));
+ params_value->Append(new base::StringValue("text"));
+ scoped_ptr<OptionalString::Params> params(
+ OptionalString::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<IncrementInteger::Params> params(
+ IncrementInteger::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, OptionalStringParamsCreate) {
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ scoped_ptr<OptionalString::Params> params(
+ OptionalString::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_FALSE(params->str.get());
+ }
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(new base::StringValue("asdf"));
+ scoped_ptr<OptionalString::Params> params(
+ OptionalString::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_TRUE(params->str.get());
+ EXPECT_EQ("asdf", *params->str);
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, OptionalParamsTakingNull) {
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(base::Value::CreateNullValue());
+ scoped_ptr<OptionalString::Params> params(
+ OptionalString::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_FALSE(params->str.get());
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, OptionalStringParamsWrongType) {
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(new base::FundamentalValue(5));
+ scoped_ptr<OptionalString::Params> params(
+ OptionalString::Params::Create(*params_value));
+ EXPECT_FALSE(params.get());
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, OptionalBeforeRequired) {
+ {
+ scoped_ptr<base::ListValue> params_value(new base::ListValue());
+ params_value->Append(base::Value::CreateNullValue());
+ params_value->Append(new base::StringValue("asdf"));
+ scoped_ptr<OptionalBeforeRequired::Params> params(
+ OptionalBeforeRequired::Params::Create(*params_value));
+ EXPECT_TRUE(params.get());
+ EXPECT_FALSE(params->first.get());
+ EXPECT_EQ("asdf", params->second);
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, NoParamsResultCreate) {
+ scoped_ptr<base::ListValue> results = OptionalString::Results::Create();
+ base::ListValue expected;
+ EXPECT_TRUE(results->Equals(&expected));
+}
+
+TEST(JsonSchemaCompilerSimpleTest, TestTypePopulate) {
+ {
+ scoped_ptr<TestType> test_type(new TestType());
+ scoped_ptr<base::DictionaryValue> value = CreateTestTypeDictionary();
+ EXPECT_TRUE(TestType::Populate(*value, test_type.get()));
+ EXPECT_EQ("bling", test_type->string);
+ EXPECT_EQ(1.1, test_type->number);
+ EXPECT_EQ(4, test_type->integer);
+    EXPECT_TRUE(test_type->boolean);
+ EXPECT_TRUE(value->Equals(test_type->ToValue().get()));
+ }
+ {
+ scoped_ptr<TestType> test_type(new TestType());
+ scoped_ptr<base::DictionaryValue> value = CreateTestTypeDictionary();
+ value->Remove("number", NULL);
+ EXPECT_FALSE(TestType::Populate(*value, test_type.get()));
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, GetTestType) {
+ {
+ scoped_ptr<base::DictionaryValue> value = CreateTestTypeDictionary();
+ scoped_ptr<TestType> test_type(new TestType());
+ EXPECT_TRUE(TestType::Populate(*value, test_type.get()));
+ scoped_ptr<base::ListValue> results =
+ GetTestType::Results::Create(*test_type);
+
+ base::DictionaryValue* result = NULL;
+ results->GetDictionary(0, &result);
+ EXPECT_TRUE(result->Equals(value.get()));
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, OnIntegerFiredCreate) {
+ {
+ scoped_ptr<base::ListValue> results(OnIntegerFired::Create(5));
+ base::ListValue expected;
+ expected.Append(new base::FundamentalValue(5));
+ EXPECT_TRUE(results->Equals(&expected));
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, OnStringFiredCreate) {
+ {
+ scoped_ptr<base::ListValue> results(OnStringFired::Create("yo dawg"));
+ base::ListValue expected;
+ expected.Append(new base::StringValue("yo dawg"));
+ EXPECT_TRUE(results->Equals(&expected));
+ }
+}
+
+TEST(JsonSchemaCompilerSimpleTest, OnTestTypeFiredCreate) {
+ {
+ TestType some_test_type;
+ scoped_ptr<base::DictionaryValue> expected = CreateTestTypeDictionary();
+ ASSERT_TRUE(expected->GetDouble("number", &some_test_type.number));
+ ASSERT_TRUE(expected->GetString("string", &some_test_type.string));
+ ASSERT_TRUE(expected->GetInteger("integer", &some_test_type.integer));
+ ASSERT_TRUE(expected->GetBoolean("boolean", &some_test_type.boolean));
+
+ scoped_ptr<base::ListValue> results(
+ OnTestTypeFired::Create(some_test_type));
+ base::DictionaryValue* result = NULL;
+ results->GetDictionary(0, &result);
+ EXPECT_TRUE(result->Equals(expected.get()));
+ }
+}
diff --git a/tools/json_schema_compiler/test/tabs.json b/tools/json_schema_compiler/test/tabs.json
new file mode 100644
index 0000000..7dca080
--- /dev/null
+++ b/tools/json_schema_compiler/test/tabs.json
@@ -0,0 +1,770 @@
+[
+ {
+ "namespace": "tabs",
+ "description": "The tabs API.",
+ "types": [
+ {
+ "id": "Tab",
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer", "minimum": 0, "description": "The ID of the tab. Tab IDs are unique within a browser session."},
+ "index": {"type": "integer", "minimum": 0, "description": "The zero-based index of the tab within its window."},
+ "windowId": {"type": "integer", "minimum": 0, "description": "The ID of the window the tab is contained within."},
+ "selected": {"type": "boolean", "description": "Whether the tab is selected.", "nodoc": true},
+ "highlighted": {"type": "boolean", "description": "Whether the tab is highlighted."},
+ "active": {"type": "boolean", "description": "Whether the tab is active in its window."},
+ "pinned": {"type": "boolean", "description": "Whether the tab is pinned."},
+ "url": {"type": "string", "description": "The URL the tab is displaying."},
+ "title": {"type": "string", "optional": true, "description": "The title of the tab. This may not be available if the tab is loading."},
+ "favIconUrl": {"type": "string", "optional": true, "description": "The URL of the tab's favicon. This may not be available if the tab is loading."},
+ "status": {"type": "string", "optional": true, "description": "Either <em>loading</em> or <em>complete</em>."},
+ "incognito": {"type": "boolean", "description": "Whether the tab is in an incognito window."}
+ }
+ }
+ ],
+ "functions": [
+ {
+ "name": "get",
+ "type": "function",
+ "description": "Retrieves details about the specified tab.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "tabId",
+ "minimum": 0
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {"name": "tab", "$ref": "Tab"}
+ ]
+ }
+ ]
+ },
+ {
+ "name": "getCurrent",
+ "type": "function",
+ "description": "Gets the tab that this script call is being made from. May be undefined if called from a non-tab context (for example: a background page or popup view).",
+ "parameters": [
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "tab",
+ "$ref": "Tab",
+ "optional": true
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "connect",
+ "nocompile": true,
+ "type": "function",
+ "description": "Connects to the content script(s) in the specified tab. The <a href='extension.html#event-onConnect'>chrome.runtime.onConnect</a> event is fired in each content script running in the specified tab for the current extension. For more details, see <a href='content_scripts.html#messaging'>Content Script Messaging</a>.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "tabId",
+ "minimum": 0
+ },
+ {
+ "type": "object",
+ "name": "connectInfo",
+ "properties": {
+ "name": { "type": "string", "optional": true, "description": "Will be passed into onConnect for content scripts that are listening for the connection event." }
+ },
+ "optional": true
+ }
+ ],
+ "returns": {
+ "$ref": "Port",
+ "description": "A port that can be used to communicate with the content scripts running in the specified tab. The port's <a href='extension.html#type-Port'>onDisconnect</a> event is fired if the tab closes or does not exist. "
+ }
+ },
+ {
+ "name": "sendRequest",
+ "nocompile": true,
+ "type": "function",
+ "description": "Sends a single request to the content script(s) in the specified tab, with an optional callback to run when a response is sent back. The <a href='extension.html#event-onRequest'>chrome.extension.onRequest</a> event is fired in each content script running in the specified tab for the current extension.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "tabId",
+ "minimum": 0
+ },
+ {
+ "type": "any",
+ "name": "request"
+ },
+ {
+ "type": "function",
+ "name": "responseCallback",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "response",
+ "type": "any",
+ "description": "The JSON response object sent by the handler of the request. If an error occurs while connecting to the specified tab, the callback will be called with no arguments and <a href='extension.html#property-lastError'>chrome.runtime.lastError</a> will be set to the error message."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "getSelected",
+ "nodoc": true,
+ "type": "function",
+ "description": "Deprecated. Please use query({'active': true}). Gets the tab that is selected in the specified window.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "windowId",
+ "minimum": 0,
+ "optional": true,
+ "description": "Defaults to the <a href='windows.html#current-window'>current window</a>."
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {"name": "tab", "$ref": "Tab"}
+ ]
+ }
+ ]
+ },
+ {
+ "name": "getAllInWindow",
+ "type": "function",
+ "nodoc": true,
+ "description": "Deprecated. Please use query({'windowId': windowId}). Gets details about all tabs in the specified window.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "windowId",
+ "minimum": 0,
+ "optional": true,
+ "description": "Defaults to the <a href='windows.html#current-window'>current window</a>."
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {"name": "tabs", "type": "array", "items": { "$ref": "Tab" } }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "create",
+ "type": "function",
+ "description": "Creates a new tab. Note: This function can be used without requesting the 'tabs' permission in the manifest.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "createProperties",
+ "properties": {
+ "windowId": {
+ "type": "integer",
+ "minimum": 0,
+ "optional": true,
+ "description": "The window to create the new tab in. Defaults to the <a href='windows.html#current-window'>current window</a>."
+ },
+ "index": {
+ "type": "integer",
+ "minimum": 0,
+ "optional": true,
+ "description": "The position the tab should take in the window. The provided value will be clamped to between zero and the number of tabs in the window."
+ },
+ "url": {
+ "type": "string",
+ "optional": true,
+ "description": "The URL to navigate the tab to initially. Fully-qualified URLs must include a scheme (i.e. 'http://www.google.com', not 'www.google.com'). Relative URLs will be relative to the current page within the extension. Defaults to the New Tab Page."
+ },
+ "active": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tab should become the active tab in the window. Defaults to <var>true</var>"
+ },
+ "selected": {
+ "nodoc": true,
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tab should become the selected tab in the window. Defaults to <var>true</var>"
+ },
+ "pinned": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tab should be pinned. Defaults to <var>false</var>"
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "tab",
+ "$ref": "Tab",
+ "description": "Details about the created tab. Will contain the ID of the new tab."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "query",
+ "type": "function",
+ "description": "Gets all tabs that have the specified properties, or all tabs if no properties are specified.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "queryInfo",
+ "properties": {
+ "active": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tabs are active in their windows."
+ },
+ "pinned": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tabs are pinned."
+ },
+ "highlighted": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tabs are highlighted."
+ },
+ "status": {
+ "type": "string",
+ "optional": true,
+ "enum": ["loading", "complete"],
+ "description": "Whether the tabs have completed loading."
+ },
+ "title": {
+ "type": "string",
+ "optional": true,
+ "description": "Match page titles against a pattern."
+ },
+ "url": {
+ "type": "string",
+ "optional": true,
+ "description": "Match tabs against a URL pattern."
+ },
+ "windowId": {
+ "type": "integer",
+ "optional": true,
+ "minimum": 0,
+ "description": "The ID of the parent window."
+ },
+ "windowType": {
+ "type": "string",
+ "optional": true,
+ "enum": ["normal", "popup", "panel", "app"],
+ "description": "The type of window the tabs are in."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "result",
+ "type": "array",
+ "items": {
+ "$ref": "Tab"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "highlight",
+ "type": "function",
+ "description": "Highlights the given tabs.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "highlightInfo",
+ "properties": {
+ "windowId": {
+ "type": "integer",
+ "description": "The window that contains the tabs."
+ },
+ "tabs": {
+ "description": "One or more tab indices to highlight.",
+ "choices": [
+ {"type": "array", "items": {"type": "integer", "minimum": 0}},
+ {"type": "integer"}
+ ]
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "window",
+ "$ref": "Window",
+ "description": "Contains details about the window whose tabs were highlighted."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "update",
+ "type": "function",
+ "description": "Modifies the properties of a tab. Properties that are not specified in <var>updateProperties</var> are not modified. Note: This function can be used without requesting the 'tabs' permission in the manifest.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "tabId",
+ "minimum": 0,
+ "optional": true,
+ "description": "Defaults to the selected tab of the <a href='windows.html#current-window'>current window</a>."
+ },
+ {
+ "type": "object",
+ "name": "updateProperties",
+ "properties": {
+ "url": {
+ "optional": true,
+ "type": "string",
+ "description": "A URL to navigate the tab to."
+ },
+ "active": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tab should be active."
+ },
+ "highlighted": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Adds or removes the tab from the current selection."
+ },
+ "selected": {
+ "nodoc": true,
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tab should be selected."
+ },
+ "pinned": {
+ "type": "boolean",
+ "optional": true,
+ "description": "Whether the tab should be pinned."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "tab",
+ "$ref": "Tab",
+ "optional": true,
+ "description": "Details about the updated tab, or <code>null</code> if the 'tabs' permission has not been requested."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "move",
+ "type": "function",
+ "description": "Moves one or more tabs to a new position within its window, or to a new window. Note that tabs can only be moved to and from normal (window.type === \"normal\") windows.",
+ "parameters": [
+ {
+ "name": "tabIds",
+ "description": "The tab or list of tabs to move.",
+ "choices": [
+ {"type": "integer", "minimum": 0},
+ {"type": "array", "items": {"type": "integer", "minimum": 0}}
+ ]
+ },
+ {
+ "type": "object",
+ "name": "moveProperties",
+ "properties": {
+ "windowId": {
+ "type": "integer",
+ "minimum": 0,
+ "optional": true,
+ "description": "Defaults to the window the tab is currently in."
+ },
+ "index": {
+ "type": "integer",
+ "minimum": 0,
+              "description": "The position to move the tab to. The provided value will be clamped to between zero and the number of tabs in the window."
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "tabs",
+ "description": "Details about the moved tabs.",
+ "choices": [
+ {"$ref": "Tab"},
+ {"type": "array", "items": {"$ref": "Tab"}}
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "reload",
+ "type": "function",
+      "description": "Reloads a tab.",
+ "parameters": [
+ {"type": "integer", "name": "tabId", "optional": true, "description": "The ID of the tab to reload; defaults to the selected tab of the current window."},
+ {
+ "type": "object",
+ "name": "reloadProperties",
+ "optional": true,
+ "properties": {
+ "bypassCache": {
+ "type": "boolean",
+ "optional": true,
+            "description": "Whether to bypass the local cache. Default is false."
+ }
+ }
+ },
+ {"type": "function", "name": "callback", "optional": true, "parameters": []}
+ ]
+ },
+ {
+ "name": "remove",
+ "type": "function",
+ "description": "Closes one or more tabs. Note: This function can be used without requesting the 'tabs' permission in the manifest.",
+ "parameters": [
+ {
+ "name": "tabIds",
+ "description": "The tab or list of tabs to close.",
+ "choices": [
+ {"type": "integer", "minimum": 0},
+ {"type": "array", "items": {"type": "integer", "minimum": 0}}
+ ]
+ },
+ {"type": "function", "name": "callback", "optional": true, "parameters": []}
+ ]
+ },
+ {
+ "name": "detectLanguage",
+ "type": "function",
+ "description": "Detects the primary language of the content in a tab.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "tabId",
+ "minimum": 0,
+ "optional": true,
+ "description": "Defaults to the active tab of the <a href='windows.html#current-window'>current window</a>."
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "language",
+ "description": "An ISO language code such as <code>en</code> or <code>fr</code>. For a complete list of languages supported by this method, see <a href='http://src.chromium.org/viewvc/chrome/trunk/src/third_party/cld/languages/internal/languages.cc'>kLanguageInfoTable</a>. The 2nd to 4th columns will be checked and the first non-NULL value will be returned except for Simplified Chinese for which zh-CN will be returned. For an unknown language, <code>und</code> will be returned."
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "captureVisibleTab",
+ "type": "function",
+      "description": "Captures the visible area of the currently active tab in the specified window. You must have <a href='manifest.html#permissions'>&lt;all_urls&gt;</a> permission to use this method.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "windowId",
+ "minimum": 0,
+ "optional": true,
+ "description": "The target window. Defaults to the <a href='windows.html#current-window'>current window</a>."
+ },
+ {
+ "type": "object",
+ "name": "options",
+ "optional": true,
+ "description": "Set parameters of image capture, such as the format of the resulting image.",
+ "properties": {
+ "format": {
+ "type": "string",
+ "optional": true,
+ "enum": ["jpeg", "png"],
+ "description": "The format of the resulting image. Default is jpeg."
+ },
+ "quality": {
+ "type": "integer",
+ "name": "quality",
+ "optional": true,
+ "minimum": 0,
+ "maximum": 100,
+ "description": "When format is 'jpeg', controls the quality of the resulting image. This value is ignored for PNG images. As quality is decreased, the resulting image will have more visual artifacts, and the number of bytes needed to store it will decrease."
+ }
+ }
+ },
+ {
+ "type": "function", "name": "callback", "parameters": [
+ {"type": "string", "name": "dataUrl", "description": "A data URL which encodes an image of the visible area of the captured tab. May be assigned to the 'src' property of an HTML Image element for display."}
+ ]
+ }
+ ]
+ },
+ {
+ "name": "executeScript",
+ "type": "function",
+ "description": "Injects JavaScript code into a page. For details, see the <a href='content_scripts.html#pi'>programmatic injection</a> section of the content scripts doc.",
+ "parameters": [
+ {"type": "integer", "name": "tabId", "optional": true, "description": "The ID of the tab in which to run the script; defaults to the active tab of the current window."},
+ {
+ "type": "object",
+ "name": "details",
+ "description": "Details of the script to run. Either the code or the file property must be set, but both may not be set at the same time.",
+ "properties": {
+ "code": {"type": "string", "optional": true, "description": "JavaScript code to execute."},
+ "file": {"type": "string", "optional": true, "description": "JavaScript file to execute."},
+            "allFrames": {"type": "boolean", "optional": true, "description": "If allFrames is true, this function injects the script into all frames of the current page. By default, it's false and the script is injected only into the top frame."}
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "description": "Called after all the JavaScript has been executed.",
+ "parameters": []
+ }
+ ]
+ },
+ {
+ "name": "insertCSS",
+ "type": "function",
+ "description": "Injects CSS into a page. For details, see the <a href='content_scripts.html#pi'>programmatic injection</a> section of the content scripts doc.",
+ "parameters": [
+ {"type": "integer", "name": "tabId", "optional": true, "description": "The ID of the tab in which to insert the CSS; defaults to the active tab of the current window."},
+ {
+ "type": "object",
+ "name": "details",
+ "description": "Details of the CSS text to insert. Either the code or the file property must be set, but both may not be set at the same time.",
+ "properties": {
+ "code": {"type": "string", "optional": true, "description": "CSS code to be injected."},
+ "file": {"type": "string", "optional": true, "description": "CSS file to be injected."},
+            "allFrames": {"type": "boolean", "optional": true, "description": "If allFrames is true, this function injects the CSS text into all frames of the current page. By default, it's false and the CSS is injected only into the top frame."}
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "description": "Called when all the CSS has been inserted.",
+ "parameters": []
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "onCreated",
+ "type": "function",
+      "description": "Fired when a tab is created. Note that the tab's URL may not be set at the time this event is fired, but you can listen to onUpdated events to be notified when a URL is set.",
+ "parameters": [
+ {
+ "$ref": "Tab",
+ "name": "tab",
+ "description": "Details of the tab that was created."
+ }
+ ]
+ },
+ {
+ "name": "onUpdated",
+ "type": "function",
+ "description": "Fired when a tab is updated.",
+ "parameters": [
+ {"type": "integer", "name": "tabId", "minimum": 0},
+ {
+ "type": "object",
+ "name": "changeInfo",
+ "description": "Lists the changes to the state of the tab that was updated.",
+ "properties": {
+ "status": {
+ "type": "string",
+ "optional": true,
+ "description": "The status of the tab. Can be either <em>loading</em> or <em>complete</em>."
+ },
+ "url": {
+ "type": "string",
+ "optional": true,
+ "description": "The tab's URL if it has changed."
+ },
+ "pinned": {
+ "type": "boolean",
+ "optional": true,
+ "description": "The tab's new pinned state."
+ }
+ }
+ },
+ {
+ "$ref": "Tab",
+ "name": "tab",
+ "description": "Gives the state of the tab that was updated."
+ }
+ ]
+ },
+ {
+ "name": "onMoved",
+ "type": "function",
+ "description": "Fired when a tab is moved within a window. Only one move event is fired, representing the tab the user directly moved. Move events are not fired for the other tabs that must move in response. This event is not fired when a tab is moved between windows. For that, see <a href='#event-onDetached'>onDetached</a>.",
+ "parameters": [
+ {"type": "integer", "name": "tabId", "minimum": 0},
+ {
+ "type": "object",
+ "name": "moveInfo",
+ "properties": {
+ "windowId": {"type": "integer", "minimum": 0},
+ "fromIndex": {"type": "integer", "minimum": 0},
+ "toIndex": {"type": "integer", "minimum": 0}
+ }
+ }
+ ]
+ },
+ {
+ "name": "onSelectionChanged",
+ "nodoc": true,
+ "type": "function",
+ "description": "Deprecated. Please use onActiveChanged.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "tabId",
+ "minimum": 0,
+ "description": "The ID of the tab that has become active."
+ },
+ {
+ "type": "object",
+ "name": "selectInfo",
+ "properties": {
+ "windowId": {
+ "type": "integer",
+ "minimum": 0,
+ "description": "The ID of the window the selected tab changed inside of."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "onActiveChanged",
+ "type": "function",
+      "description": "Fired when the selected tab in a window changes.",
+ "parameters": [
+ {
+ "type": "integer",
+ "name": "tabId",
+ "minimum": 0,
+ "description": "The ID of the tab that has become active."
+ },
+ {
+ "type": "object",
+ "name": "selectInfo",
+ "properties": {
+ "windowId": {
+ "type": "integer",
+ "minimum": 0,
+ "description": "The ID of the window the selected tab changed inside of."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "onHighlightChanged",
+ "type": "function",
+      "description": "Fired when the highlighted or selected tabs in a window change.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "selectInfo",
+ "properties": {
+ "windowId": {
+ "type": "integer",
+ "minimum": 0,
+ "description": "The window whose tabs changed."
+ },
+ "tabIds": {
+ "type": "array",
+ "name": "tabIds",
+ "items": {"type": "integer", "minimum": 0},
+ "description": "All highlighted tabs in the window."
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "onDetached",
+ "type": "function",
+ "description": "Fired when a tab is detached from a window, for example because it is being moved between windows.",
+ "parameters": [
+ {"type": "integer", "name": "tabId", "minimum": 0},
+ {
+ "type": "object",
+ "name": "detachInfo",
+ "properties": {
+ "oldWindowId": {"type": "integer", "minimum": 0},
+ "oldPosition": {"type": "integer", "minimum": 0}
+ }
+ }
+ ]
+ },
+ {
+ "name": "onAttached",
+ "type": "function",
+ "description": "Fired when a tab is attached to a window, for example because it was moved between windows.",
+ "parameters": [
+ {"type": "integer", "name": "tabId", "minimum": 0},
+ {
+ "type": "object",
+ "name": "attachInfo",
+ "properties": {
+ "newWindowId": {"type": "integer", "minimum": 0},
+ "newPosition": {"type": "integer", "minimum": 0}
+ }
+ }
+ ]
+ },
+ {
+ "name": "onRemoved",
+ "type": "function",
+ "description": "Fired when a tab is closed. Note: A listener can be registered for this event without requesting the 'tabs' permission in the manifest.",
+ "parameters": [
+ {"type": "integer", "name": "tabId", "minimum": 0},
+ {
+ "type": "object",
+ "name": "removeInfo",
+ "properties": {
+ "isWindowClosing": {"type": "boolean", "description": "True when the tab is being closed because its window is being closed." }
+ }
+ }
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/test/test_features.json b/tools/json_schema_compiler/test/test_features.json
new file mode 100644
index 0000000..26d67a1
--- /dev/null
+++ b/tools/json_schema_compiler/test/test_features.json
@@ -0,0 +1,25 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+{
+ "simple": {
+ "channel": "stable",
+ "extension_types": ["extension", "legacy_packaged_app"],
+ "min_manifest_version": 2
+ },
+ "complex": [
+ {
+ "channel": "dev",
+ "extension_types": ["platform_app"]
+ },
+ {
+ "channel": "stable",
+ "extension_types": ["platform_app"],
+ "whitelist": [
+ "8C3741E3AF0B93B6E8E0DDD499BB0B74839EA578",
+ "E703483CEF33DEC18B4B6DD84B5C776FB9182BDB"
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/tools/json_schema_compiler/test/test_util.cc b/tools/json_schema_compiler/test/test_util.cc
new file mode 100644
index 0000000..79d5f7b
--- /dev/null
+++ b/tools/json_schema_compiler/test/test_util.cc
@@ -0,0 +1,69 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/test/test_util.h"
+
+#include <string>
+
+#include "base/json/json_reader.h"
+#include "base/logging.h"
+
+namespace json_schema_compiler {
+namespace test_util {
+
+scoped_ptr<base::Value> ReadJson(const base::StringPiece& json) {
+ int error_code;
+ std::string error_msg;
+ scoped_ptr<base::Value> result(base::JSONReader::ReadAndReturnError(
+ json,
+ base::JSON_ALLOW_TRAILING_COMMAS,
+ &error_code,
+ &error_msg));
+ // CHECK not ASSERT since passing invalid |json| is a test error.
+ CHECK(result) << error_msg;
+ return result.Pass();
+}
+
+scoped_ptr<base::ListValue> List(base::Value* a) {
+ scoped_ptr<base::ListValue> list(new base::ListValue());
+ list->Append(a);
+ return list.Pass();
+}
+scoped_ptr<base::ListValue> List(base::Value* a, base::Value* b) {
+ scoped_ptr<base::ListValue> list = List(a);
+ list->Append(b);
+ return list.Pass();
+}
+scoped_ptr<base::ListValue> List(base::Value* a,
+ base::Value* b,
+ base::Value* c) {
+ scoped_ptr<base::ListValue> list = List(a, b);
+ list->Append(c);
+ return list.Pass();
+}
+
+scoped_ptr<base::DictionaryValue> Dictionary(
+ const std::string& ak, base::Value* av) {
+ scoped_ptr<base::DictionaryValue> dict(new base::DictionaryValue());
+ dict->SetWithoutPathExpansion(ak, av);
+ return dict.Pass();
+}
+scoped_ptr<base::DictionaryValue> Dictionary(
+ const std::string& ak, base::Value* av,
+ const std::string& bk, base::Value* bv) {
+ scoped_ptr<base::DictionaryValue> dict = Dictionary(ak, av);
+ dict->SetWithoutPathExpansion(bk, bv);
+ return dict.Pass();
+}
+scoped_ptr<base::DictionaryValue> Dictionary(
+ const std::string& ak, base::Value* av,
+ const std::string& bk, base::Value* bv,
+ const std::string& ck, base::Value* cv) {
+ scoped_ptr<base::DictionaryValue> dict = Dictionary(ak, av, bk, bv);
+ dict->SetWithoutPathExpansion(ck, cv);
+ return dict.Pass();
+}
+
+} // namespace test_util
+} // namespace json_schema_compiler
diff --git a/tools/json_schema_compiler/test/test_util.h b/tools/json_schema_compiler/test/test_util.h
new file mode 100644
index 0000000..7590004
--- /dev/null
+++ b/tools/json_schema_compiler/test/test_util.h
@@ -0,0 +1,55 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_JSON_SCHEMA_COMPILER_TEST_TEST_UTIL_H_
+#define TOOLS_JSON_SCHEMA_COMPILER_TEST_TEST_UTIL_H_
+
+#include "base/memory/scoped_ptr.h"
+#include "base/strings/string_piece.h"
+#include "base/values.h"
+
+namespace json_schema_compiler {
+namespace test_util {
+
+scoped_ptr<base::Value> ReadJson(const base::StringPiece& json);
+
+template <typename T>
+std::vector<T> Vector(const T& a) {
+ std::vector<T> arr;
+ arr.push_back(a);
+ return arr;
+}
+template <typename T>
+std::vector<T> Vector(const T& a, const T& b) {
+ std::vector<T> arr = Vector(a);
+ arr.push_back(b);
+ return arr;
+}
+template <typename T>
+std::vector<T> Vector(const T& a, const T& b, const T& c) {
+ std::vector<T> arr = Vector(a, b);
+ arr.push_back(c);
+ return arr;
+}
+
+scoped_ptr<base::ListValue> List(base::Value* a);
+scoped_ptr<base::ListValue> List(base::Value* a, base::Value* b);
+scoped_ptr<base::ListValue> List(base::Value* a,
+ base::Value* b,
+ base::Value* c);
+
+scoped_ptr<base::DictionaryValue> Dictionary(
+ const std::string& ak, base::Value* av);
+scoped_ptr<base::DictionaryValue> Dictionary(
+ const std::string& ak, base::Value* av,
+ const std::string& bk, base::Value* bv);
+scoped_ptr<base::DictionaryValue> Dictionary(
+ const std::string& ak, base::Value* av,
+ const std::string& bk, base::Value* bv,
+ const std::string& ck, base::Value* cv);
+
+} // namespace test_util
+} // namespace json_schema_compiler
+
+#endif // TOOLS_JSON_SCHEMA_COMPILER_TEST_TEST_UTIL_H_
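The helpers declared above are meant to keep test expectations terse. A minimal sketch of how they compose (the test name and literal values are illustrative, not taken from this patch):

  #include "base/values.h"
  #include "testing/gtest/include/gtest/gtest.h"
  #include "tools/json_schema_compiler/test/test_util.h"

  using namespace json_schema_compiler::test_util;

  TEST(TestUtilSketch, ParsedAndBuiltValuesMatch) {
    // {"a": 1, "b": ["x"]} built two ways: parsed from JSON text and
    // assembled with the Dictionary()/List() helpers.
    scoped_ptr<base::Value> parsed = ReadJson("{\"a\": 1, \"b\": [\"x\"]}");
    scoped_ptr<base::DictionaryValue> built = Dictionary(
        "a", new base::FundamentalValue(1),
        "b", List(new base::StringValue("x")).release());
    EXPECT_TRUE(parsed->Equals(built.get()));
  }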
diff --git a/tools/json_schema_compiler/test/windows.json b/tools/json_schema_compiler/test/windows.json
new file mode 100644
index 0000000..ae90084
--- /dev/null
+++ b/tools/json_schema_compiler/test/windows.json
@@ -0,0 +1,265 @@
+[
+ {
+ "namespace": "windows",
+ "description": "The windows API.",
+ "types": [
+ {
+ "id": "Window",
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer", "minimum": 0, "description": "The ID of the window. Window IDs are unique within a browser session."},
+ "focused": {"type": "boolean", "description": "Whether the window is currently the focused window."},
+ "top": {"type": "integer", "description": "The offset of the window from the top edge of the screen in pixels."},
+ "left": {"type": "integer", "description": "The offset of the window from the left edge of the screen in pixels."},
+ "width": {"type": "integer", "description": "The width of the window in pixels."},
+ "height": {"type": "integer", "description": "The height of the window in pixels."},
+ "tabs": {"type": "array", "items": { "$ref": "tabs.Tab" }, "optional": true, "description": "Array of $ref:Tab objects representing the current tabs in the window."},
+ "incognito": {"type": "boolean", "description": "Whether the window is incognito."},
+ "type": {
+ "type": "string",
+ "description": "The type of browser window this is.",
+ "enum": ["normal", "popup", "panel", "app"]
+ },
+ "state": {
+ "type": "string",
+ "description": "The state of this browser window.",
+ "enum": ["normal", "minimized", "maximized"]
+ }
+ }
+ }
+ ],
+ "properties": {
+ "WINDOW_ID_NONE": {
+ "type": "integer",
+ "value": "-1",
+ "description": "The windowId value that represents the absence of a chrome browser window."
+ }
+ },
+ "functions": [
+ {
+ "name": "get",
+ "type": "function",
+ "description": "Gets details about a window.",
+ "parameters": [
+ {"type": "integer", "name": "windowId", "minimum": 0},
+ {
+ "type": "object",
+ "name": "getInfo",
+ "optional": true,
+ "description": "",
+ "properties": {
+ "populate": {"type": "boolean", "optional": true, "description": "If true, the window object will have a <var>tabs</var> property that contains a list of the $ref:Tab objects" }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "window", "$ref": "Window"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "getCurrent",
+ "type": "function",
+ "description": "Gets the <a href='#current-window'>current window</a>.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "getInfo",
+ "optional": true,
+ "description": "",
+ "properties": {
+ "populate": {"type": "boolean", "optional": true, "description": "If true, the window object will have a <var>tabs</var> property that contains a list of the $ref:Tab objects" }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "window", "$ref": "Window"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "getLastFocused",
+ "type": "function",
+ "description": "Gets the window that was most recently focused — typically the window 'on top'.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "getInfo",
+ "optional": true,
+ "description": "",
+ "properties": {
+ "populate": {"type": "boolean", "optional": true, "description": "If true, the window object will have a <var>tabs</var> property that contains a list of the $ref:Tab objects" }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "window", "$ref": "Window"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "getAll",
+ "type": "function",
+ "description": "Gets all windows.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "getInfo",
+ "optional": true,
+ "description": "",
+ "properties": {
+ "populate": {"type": "boolean", "optional": true, "description": "If true, each window object will have a <var>tabs</var> property that contains a list of the $ref:Tab objects for that window." }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "parameters": [
+ {
+ "name": "windows", "type": "array", "items": { "$ref": "Window" }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "create",
+ "nocompile": true,
+ "type": "function",
+      "description": "Creates (opens) a new browser window with any optional sizing, position, or default URL provided.",
+ "parameters": [
+ {
+ "type": "object",
+ "name": "createData",
+ "properties": {
+ "url": {
+ "type": "string",
+ "description": "A URL or list of URLs to open as tabs in the window. Fully-qualified URLs must include a scheme (i.e. 'http://www.google.com', not 'www.google.com'). Relative URLs will be relative to the current page within the extension. Defaults to the New Tab Page.",
+ "optional": true,
+ "choices": [
+ {"type": "string"},
+ {"type": "array", "items": {"type": "string"}}
+ ]
+ },
+          "tabId": {"type": "integer", "minimum": 0, "optional": true, "description": "The id of the tab to adopt into the new window."},
+          "left": {"type": "integer", "optional": true, "description": "The number of pixels to position the new window from the left edge of the screen. If not specified, the new window is offset naturally from the last focused window. This value is ignored for panels."},
+          "top": {"type": "integer", "optional": true, "description": "The number of pixels to position the new window from the top edge of the screen. If not specified, the new window is offset naturally from the last focused window. This value is ignored for panels."},
+ "width": {"type": "integer", "minimum": 0, "optional": true, "description": "The width in pixels of the new window. If not specified defaults to a natural width."},
+ "height": {"type": "integer", "minimum": 0, "optional": true, "description": "The height in pixels of the new window. If not specified defaults to a natural height."},
+ "focused": {"type": "boolean", "optional": true, "description": "If true, opens an active window. If false, opens an inactive window."},
+ "incognito": {"type": "boolean", "optional": true, "description": "Whether the new window should be an incognito window."},
+ "type": {
+ "type": "string",
+ "optional": true,
+ "description": "Specifies what type of browser window to create. The 'panel' type creates a popup unless the '--enable-panels' flag is set.",
+ "enum": ["normal", "popup", "panel"]
+ }
+ },
+ "optional": true
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "window", "$ref": "Window", "description": "Contains details about the created window.",
+ "optional": true
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "update",
+ "type": "function",
+ "description": "Updates the properties of a window. Specify only the properties that you want to change; unspecified properties will be left unchanged.",
+ "parameters": [
+ {"type": "integer", "name": "windowId", "minimum": 0},
+ {
+ "type": "object",
+ "name": "updateInfo",
+ "properties": {
+ "left": {"type": "integer", "optional": true, "description": "The offset from the left edge of the screen to move the window to in pixels. This value is ignored for panels."},
+ "top": {"type": "integer", "optional": true, "description": "The offset from the top edge of the screen to move the window to in pixels. This value is ignored for panels."},
+ "width": {"type": "integer", "minimum": 0, "optional": true, "description": "The width to resize the window to in pixels. This value is ignored for panels."},
+ "height": {"type": "integer", "minimum": 0, "optional": true, "description": "The height to resize the window to in pixels. This value is ignored for panels."},
+ "focused": {"type": "boolean", "optional": true, "description": "If true, brings the window to the front. If false, brings the next window in the z-order to the front."},
+ "drawAttention": {"type": "boolean", "optional": true, "description": "If true, causes the window to be displayed in a manner that draws the user's attention to the window, without changing the focused window. The effect lasts until the user changes focus to the window. This option has no effect if set to false or if the window already has focus."},
+ "state": {
+ "type": "string",
+ "optional": true,
+ "description": "The new state of the window. The 'minimized' and 'maximized' states cannot be combined with 'left', 'top', 'width' or 'height'.",
+ "enum": ["normal", "minimized", "maximized"]
+ }
+ }
+ },
+ {
+ "type": "function",
+ "name": "callback",
+ "optional": true,
+ "parameters": [
+ {
+ "name": "window", "$ref": "Window"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "remove",
+ "type": "function",
+ "description": "Removes (closes) a window, and all the tabs inside it.",
+ "parameters": [
+ {"type": "integer", "name": "windowId", "minimum": 0},
+ {"type": "function", "name": "callback", "optional": true, "parameters": []}
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "onCreated",
+ "type": "function",
+ "description": "Fired when a window is created.",
+ "parameters": [
+ {
+ "$ref": "Window",
+ "name": "window",
+ "description": "Details of the window that was created."
+ }
+ ]
+ },
+ {
+ "name": "onRemoved",
+ "type": "function",
+ "description": "Fired when a window is removed (closed).",
+ "parameters": [
+ {"type": "integer", "name": "windowId", "minimum": 0, "description": "ID of the removed window."}
+ ]
+ },
+ {
+ "name": "onFocusChanged",
+ "type": "function",
+ "description": "Fired when the currently focused window changes. Will be chrome.windows.WINDOW_ID_NONE if all chrome windows have lost focus. Note: On some Linux window managers, WINDOW_ID_NONE will always be sent immediately preceding a switch from one chrome window to another.",
+ "parameters": [
+ {"type": "integer", "name": "windowId", "minimum": -1, "description": "ID of the newly focused window."}
+ ]
+ }
+ ]
+ }
+]
diff --git a/tools/json_schema_compiler/util.cc b/tools/json_schema_compiler/util.cc
new file mode 100644
index 0000000..2806cfc
--- /dev/null
+++ b/tools/json_schema_compiler/util.cc
@@ -0,0 +1,97 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/json_schema_compiler/util.h"
+
+#include "base/values.h"
+
+namespace json_schema_compiler {
+namespace util {
+
+bool GetItemFromList(const base::ListValue& from, int index, int* out) {
+ return from.GetInteger(index, out);
+}
+
+bool GetItemFromList(const base::ListValue& from, int index, bool* out) {
+ return from.GetBoolean(index, out);
+}
+
+bool GetItemFromList(const base::ListValue& from, int index, double* out) {
+ return from.GetDouble(index, out);
+}
+
+bool GetItemFromList(const base::ListValue& from, int index, std::string* out) {
+ return from.GetString(index, out);
+}
+
+bool GetItemFromList(const base::ListValue& from,
+ int index,
+ linked_ptr<base::Value>* out) {
+ const base::Value* value = NULL;
+ if (!from.Get(index, &value))
+ return false;
+ *out = make_linked_ptr(value->DeepCopy());
+ return true;
+}
+
+bool GetItemFromList(const base::ListValue& from, int index,
+ linked_ptr<base::DictionaryValue>* out) {
+ const base::DictionaryValue* dict = NULL;
+ if (!from.GetDictionary(index, &dict))
+ return false;
+ *out = make_linked_ptr(dict->DeepCopy());
+ return true;
+}
+
+void AddItemToList(const int from, base::ListValue* out) {
+ out->Append(new base::FundamentalValue(from));
+}
+
+void AddItemToList(const bool from, base::ListValue* out) {
+ out->Append(new base::FundamentalValue(from));
+}
+
+void AddItemToList(const double from, base::ListValue* out) {
+ out->Append(new base::FundamentalValue(from));
+}
+
+void AddItemToList(const std::string& from, base::ListValue* out) {
+ out->Append(new base::StringValue(from));
+}
+
+void AddItemToList(const linked_ptr<base::Value>& from,
+ base::ListValue* out) {
+ out->Append(from->DeepCopy());
+}
+
+void AddItemToList(const linked_ptr<base::DictionaryValue>& from,
+ base::ListValue* out) {
+ out->Append(static_cast<base::Value*>(from->DeepCopy()));
+}
+
+std::string ValueTypeToString(base::Value::Type type) {
+  switch (type) {
+ case base::Value::TYPE_NULL:
+ return "null";
+ case base::Value::TYPE_BOOLEAN:
+ return "boolean";
+ case base::Value::TYPE_INTEGER:
+ return "integer";
+ case base::Value::TYPE_DOUBLE:
+ return "number";
+ case base::Value::TYPE_STRING:
+ return "string";
+ case base::Value::TYPE_BINARY:
+ return "binary";
+ case base::Value::TYPE_DICTIONARY:
+ return "dictionary";
+ case base::Value::TYPE_LIST:
+ return "list";
+ }
+ NOTREACHED();
+ return "";
+}
+
+} // namespace util
+} // namespace json_schema_compiler
diff --git a/tools/json_schema_compiler/util.h b/tools/json_schema_compiler/util.h
new file mode 100644
index 0000000..228eced
--- /dev/null
+++ b/tools/json_schema_compiler/util.h
@@ -0,0 +1,181 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_JSON_SCHEMA_COMPILER_UTIL_H__
+#define TOOLS_JSON_SCHEMA_COMPILER_UTIL_H__
+
+#include <string>
+#include <vector>
+
+#include "base/memory/linked_ptr.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/values.h"
+
+namespace json_schema_compiler {
+
+namespace util {
+
+// Creates a new item at |out| from |from|[|index|]. These are used by template
+// specializations of |Populate(Optional)ArrayFromList|.
+bool GetItemFromList(const base::ListValue& from, int index, int* out);
+bool GetItemFromList(const base::ListValue& from, int index, bool* out);
+bool GetItemFromList(const base::ListValue& from, int index, double* out);
+bool GetItemFromList(const base::ListValue& from, int index, std::string* out);
+bool GetItemFromList(const base::ListValue& from,
+ int index,
+ linked_ptr<base::Value>* out);
+bool GetItemFromList(const base::ListValue& from,
+ int index,
+ linked_ptr<base::DictionaryValue>* out);
+
+// This template is used for types generated by tools/json_schema_compiler.
+template<class T>
+bool GetItemFromList(const base::ListValue& from,
+ int index,
+ linked_ptr<T>* out) {
+ const base::DictionaryValue* dict;
+ if (!from.GetDictionary(index, &dict))
+ return false;
+ scoped_ptr<T> obj(new T());
+ if (!T::Populate(*dict, obj.get()))
+ return false;
+ *out = linked_ptr<T>(obj.release());
+ return true;
+}
+
+// Populates |out| with the contents of |list|. Returns false if any element
+// of |list| is not a |T|.
+template <class T>
+bool PopulateArrayFromList(
+ const base::ListValue& list, std::vector<T>* out) {
+ out->clear();
+ T value;
+ for (size_t i = 0; i < list.GetSize(); ++i) {
+ if (!GetItemFromList(list, i, &value))
+ return false;
+ out->push_back(value);
+ }
+
+ return true;
+}
+
+// Populates |out| with |from|.|name|. Returns false if there is no list at
+// the specified key or if the list has anything other than |T|.
+template <class T>
+bool PopulateArrayFromDictionary(
+ const base::DictionaryValue& from,
+ const std::string& name,
+ std::vector<T>* out) {
+ const base::ListValue* list = NULL;
+ if (!from.GetListWithoutPathExpansion(name, &list))
+ return false;
+
+ return PopulateArrayFromList(*list, out);
+}
+
+// Creates a new vector at |out| containing the items of |list|. Returns false
+// (and resets |*out|) if any element of |list| is not a |T|.
+template <class T>
+bool PopulateOptionalArrayFromList(
+ const base::ListValue& list,
+ scoped_ptr<std::vector<T> >* out) {
+ out->reset(new std::vector<T>());
+ T value;
+ for (size_t i = 0; i < list.GetSize(); ++i) {
+ if (!GetItemFromList(list, i, &value)) {
+ out->reset();
+ return false;
+ }
+ (*out)->push_back(value);
+ }
+
+ return true;
+}
+
+// Creates a new vector containing |from|.|name| at |out|. Returns
+// true on success or if there is nothing at the specified key. Returns false
+// if anything other than a list of |T| is at the specified key.
+template <class T>
+bool PopulateOptionalArrayFromDictionary(
+ const base::DictionaryValue& from,
+ const std::string& name,
+ scoped_ptr<std::vector<T> >* out) {
+ const base::ListValue* list = NULL;
+ {
+ const base::Value* maybe_list = NULL;
+ // Since |name| is optional, its absence is acceptable. However, anything
+ // other than a ListValue is not.
+ if (!from.GetWithoutPathExpansion(name, &maybe_list))
+ return true;
+ if (!maybe_list->IsType(base::Value::TYPE_LIST))
+ return false;
+ list = static_cast<const base::ListValue*>(maybe_list);
+ }
+
+ return PopulateOptionalArrayFromList(*list, out);
+}
+
+// Appends a Value newly created from |from| to |out|. These are used by
+// template specializations of |PopulateListFrom(Optional)Array|.
+void AddItemToList(const int from, base::ListValue* out);
+void AddItemToList(const bool from, base::ListValue* out);
+void AddItemToList(const double from, base::ListValue* out);
+void AddItemToList(const std::string& from, base::ListValue* out);
+void AddItemToList(const linked_ptr<base::Value>& from,
+ base::ListValue* out);
+void AddItemToList(const linked_ptr<base::DictionaryValue>& from,
+ base::ListValue* out);
+
+// This template is used for types generated by tools/json_schema_compiler.
+template<class T>
+void AddItemToList(const linked_ptr<T>& from, base::ListValue* out) {
+ out->Append(from->ToValue().release());
+}
+
+// Sets |out| to the contents of |from|. Requires AddItemToList to be
+// implemented for |T|.
+template <class T>
+void PopulateListFromArray(
+ const std::vector<T>& from,
+ base::ListValue* out) {
+ out->Clear();
+ for (typename std::vector<T>::const_iterator it = from.begin();
+ it != from.end(); ++it) {
+ AddItemToList(*it, out);
+ }
+}
+
+// Sets |out| to the contents of |from| if |from| is non-NULL. Requires
+// AddItemToList to be implemented for |T|.
+template <class T>
+void PopulateListFromOptionalArray(
+ const scoped_ptr<std::vector<T> >& from,
+ base::ListValue* out) {
+ if (from.get())
+ PopulateListFromArray(*from, out);
+}
+
+template <class T>
+scoped_ptr<base::Value> CreateValueFromArray(const std::vector<T>& from) {
+ base::ListValue* list = new base::ListValue();
+ PopulateListFromArray(from, list);
+ return scoped_ptr<base::Value>(list);
+}
+
+template <class T>
+scoped_ptr<base::Value> CreateValueFromOptionalArray(
+ const scoped_ptr<std::vector<T> >& from) {
+ if (from.get())
+ return CreateValueFromArray(*from);
+ return scoped_ptr<base::Value>();
+}
+
+std::string ValueTypeToString(base::Value::Type type);
+
+} // namespace util
+} // namespace json_schema_compiler
+
+#endif // TOOLS_JSON_SCHEMA_COMPILER_UTIL_H__
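To make the contract of the array helpers concrete, here is a minimal sketch of a dictionary-to-vector round trip using the same base/ value APIs that appear elsewhere in this change (variable and function names are illustrative):

  #include <vector>

  #include "base/values.h"
  #include "tools/json_schema_compiler/util.h"

  namespace util = json_schema_compiler::util;

  void PopulateSketch() {
    // Build {"numbers": [1, 2]}; the dictionary takes ownership of the list.
    base::ListValue* numbers = new base::ListValue();
    numbers->Append(new base::FundamentalValue(1));
    numbers->Append(new base::FundamentalValue(2));
    base::DictionaryValue dict;
    dict.SetWithoutPathExpansion("numbers", numbers);

    // Dictionary -> std::vector<int>; returns false if "numbers" is missing
    // or holds anything that is not an integer.
    std::vector<int> ints;
    if (!util::PopulateArrayFromDictionary(dict, "numbers", &ints))
      return;

    // std::vector<int> -> ListValue, via the AddItemToList() overloads.
    base::ListValue as_list;
    util::PopulateListFromArray(ints, &as_list);
  }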
diff --git a/tools/json_schema_compiler/util_cc_helper.py b/tools/json_schema_compiler/util_cc_helper.py
new file mode 100644
index 0000000..0e41abf
--- /dev/null
+++ b/tools/json_schema_compiler/util_cc_helper.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+_API_UTIL_NAMESPACE = 'json_schema_compiler::util'
+
+
+class UtilCCHelper(object):
+ """A util class that generates code that uses
+ tools/json_schema_compiler/util.cc.
+ """
+ def __init__(self, type_manager):
+ self._type_manager = type_manager
+
+ def PopulateArrayFromDictionary(self, array_prop, src, name, dst):
+    """Generates code to get an array from src.name into dst.
+
+ src: DictionaryValue*
+ dst: std::vector or scoped_ptr<std::vector>
+ """
+ prop = array_prop.item_type
+ sub = {
+ 'namespace': _API_UTIL_NAMESPACE,
+ 'name': name,
+ 'src': src,
+ 'dst': dst,
+ }
+
+    sub['type'] = self._type_manager.GetCppType(prop)
+ if array_prop.optional:
+ val = ('%(namespace)s::PopulateOptionalArrayFromDictionary'
+ '(*%(src)s, "%(name)s", &%(dst)s)')
+ else:
+ val = ('%(namespace)s::PopulateArrayFromDictionary'
+ '(*%(src)s, "%(name)s", &%(dst)s)')
+
+ return val % sub
+
+ def PopulateArrayFromList(self, src, dst, optional):
+ """Generates code to get an array from src into dst.
+
+ src: ListValue*
+ dst: std::vector or scoped_ptr<std::vector>
+ """
+ if optional:
+ val = '%(namespace)s::PopulateOptionalArrayFromList(*%(src)s, &%(dst)s)'
+ else:
+ val = '%(namespace)s::PopulateArrayFromList(*%(src)s, &%(dst)s)'
+ return val % {
+ 'namespace': _API_UTIL_NAMESPACE,
+ 'src': src,
+ 'dst': dst
+ }
+
+ def CreateValueFromArray(self, src, optional):
+    """Generates code to create a scoped_ptr<Value> from the array at src.
+
+ |src| The variable to convert, either a vector or scoped_ptr<vector>.
+ |optional| Whether |type_| was optional. Optional types are pointers so
+ must be treated differently.
+ """
+ if optional:
+ name = 'CreateValueFromOptionalArray'
+ else:
+ name = 'CreateValueFromArray'
+ return '%s::%s(%s)' % (_API_UTIL_NAMESPACE, name, src)
+
+ def GetIncludePath(self):
+ return '#include "tools/json_schema_compiler/util.h"'
+
+ def GetValueTypeString(self, value, is_ptr=False):
+ call = '.GetType()'
+ if is_ptr:
+ call = '->GetType()'
+ return 'json_schema_compiler::util::ValueTypeToString(%s%s)' % (value, call)
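To make the string substitution above concrete: for a non-optional array property, a call such as PopulateArrayFromDictionary(prop, 'dict', 'tabs', 'params->tabs') (argument values here are illustrative) renders to the C++ expression

  json_schema_compiler::util::PopulateArrayFromDictionary(
      *dict, "tabs", &params->tabs)

with the Optional variant substituted when array_prop.optional is set.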
diff --git a/tools/linux/PRESUBMIT.py b/tools/linux/PRESUBMIT.py
new file mode 100644
index 0000000..d4d8601
--- /dev/null
+++ b/tools/linux/PRESUBMIT.py
@@ -0,0 +1,45 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Top-level presubmit script for linux.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into gcl.
+"""
+
+
+def CommonChecks(input_api, output_api):
+ import sys
+ def join(*args):
+ return input_api.os_path.join(input_api.PresubmitLocalPath(), *args)
+
+ output = []
+ sys_path_backup = sys.path
+ try:
+ sys.path = [
+ join('..', 'linux'),
+ ] + sys.path
+ output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
+ finally:
+ sys.path = sys_path_backup
+
+ output.extend(
+ input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api,
+ input_api.os_path.join(input_api.PresubmitLocalPath(), 'tests'),
+ whitelist=[r'.+_tests\.py$']))
+
+ if input_api.is_committing:
+ output.extend(input_api.canned_checks.PanProjectChecks(input_api,
+ output_api,
+ owners_check=False))
+ return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CommonChecks(input_api, output_api)
diff --git a/tools/linux/dump-static-initializers.py b/tools/linux/dump-static-initializers.py
new file mode 100755
index 0000000..0e83456
--- /dev/null
+++ b/tools/linux/dump-static-initializers.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dump functions called by static intializers in a Linux Release binary.
+
+Usage example:
+  tools/linux/dump-static-initializers.py out/Release/chrome
+
+A brief overview of static initialization:
+1) the compiler writes out, per object file, a function that contains
+   the static initializers for that file.
+2) the compiler also writes out a pointer to that function in a special
+ section.
+3) at link time, the linker concatenates the function pointer sections
+ into a single list of all initializers.
+4) at run time, on startup the binary runs all function pointers.
+
+The functions in (1) all have mangled names of the form
+ _GLOBAL__I_foobar.cc
+using objdump, we can disassemble those functions and dump all symbols that
+they reference.
+"""
+
+import optparse
+import re
+import subprocess
+import sys
+
+# A map of symbol => informative text about it.
+NOTES = {
+ '__cxa_atexit@plt': 'registers a dtor to run at exit',
+ 'std::__ioinit': '#includes <iostream>, use <ostream> instead',
+}
+
+# Determine whether this is a git checkout (as opposed to e.g. svn).
+IS_GIT_WORKSPACE = (subprocess.Popen(
+ ['git', 'rev-parse'], stderr=subprocess.PIPE).wait() == 0)
+
+class Demangler(object):
+ """A wrapper around c++filt to provide a function to demangle symbols."""
+ def __init__(self):
+ self.cppfilt = subprocess.Popen(['c++filt'],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE)
+
+ def Demangle(self, sym):
+ """Given mangled symbol |sym|, return its demangled form."""
+ self.cppfilt.stdin.write(sym + '\n')
+ return self.cppfilt.stdout.readline().strip()
+
+# Matches for example: "cert_logger.pb.cc", capturing "cert_logger".
+protobuf_filename_re = re.compile(r'(.*)\.pb\.cc$')
+def QualifyFilenameAsProto(filename):
+ """Attempt to qualify a bare |filename| with a src-relative path, assuming it
+ is a protoc-generated file. If a single match is found, it is returned.
+ Otherwise the original filename is returned."""
+ if not IS_GIT_WORKSPACE:
+ return filename
+ match = protobuf_filename_re.match(filename)
+ if not match:
+ return filename
+  basename = match.group(1)
+ gitlsfiles = subprocess.Popen(
+ ['git', 'ls-files', '--', '*/%s.proto' % basename],
+ stdout=subprocess.PIPE)
+ candidate = filename
+ for line in gitlsfiles.stdout:
+ if candidate != filename:
+ return filename # Multiple hits, can't help.
+ candidate = line.strip()
+ return candidate
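+
+# Example (hypothetical paths): QualifyFilenameAsProto('cert_logger.pb.cc')
+# returns something like 'chrome/browser/net/cert_logger.proto' when git
+# tracks exactly one '*/cert_logger.proto'; with zero or multiple matches the
+# bare filename comes back unchanged.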
+
+# Regex matching the substring of a symbol's demangled text representation most
+# likely to appear in a source file.
+# Example: "v8::internal::Builtins::InitBuiltinFunctionTable()" becomes
+# "InitBuiltinFunctionTable", since the first (optional & non-capturing) group
+# picks up any ::-qualification and the last fragment picks up a suffix that
+# starts with an opener.
+symbol_code_name_re = re.compile(r'^(?:[^(<[]*::)?([^:(<[]*).*?$')
+def QualifyFilename(filename, symbol):
+ """Given a bare filename and a symbol that occurs in it, attempt to qualify
+ it with a src-relative path. If more than one file matches, return the
+ original filename."""
+ if not IS_GIT_WORKSPACE:
+ return filename
+ match = symbol_code_name_re.match(symbol)
+ if not match:
+ return filename
+ symbol = match.group(1)
+ gitgrep = subprocess.Popen(
+ ['git', 'grep', '-l', symbol, '--', '*/%s' % filename],
+ stdout=subprocess.PIPE)
+ candidate = filename
+ for line in gitgrep.stdout:
+ if candidate != filename: # More than one candidate; return bare filename.
+ return filename
+ candidate = line.strip()
+ return candidate
+
+# Regex matching nm output for the symbols we're interested in.
+# See test_ParseNmLine for examples.
+nm_re = re.compile(r'(\S+) (\S+) t (?:_ZN12)?_GLOBAL__(?:sub_)?I_(.*)')
+def ParseNmLine(line):
+ """Given a line of nm output, parse static initializers as a
+ (file, start, size) tuple."""
+ match = nm_re.match(line)
+ if match:
+ addr, size, filename = match.groups()
+ return (filename, int(addr, 16), int(size, 16))
+
+
+def test_ParseNmLine():
+ """Verify the nm_re regex matches some sample lines."""
+ parse = ParseNmLine(
+ '0000000001919920 0000000000000008 t '
+ '_ZN12_GLOBAL__I_safe_browsing_service.cc')
+ assert parse == ('safe_browsing_service.cc', 26319136, 8), parse
+
+ parse = ParseNmLine(
+ '00000000026b9eb0 0000000000000024 t '
+ '_GLOBAL__sub_I_extension_specifics.pb.cc')
+ assert parse == ('extension_specifics.pb.cc', 40607408, 36), parse
+
+# Just always run the test; it is fast enough.
+test_ParseNmLine()
+
+
+def ParseNm(binary):
+ """Given a binary, yield static initializers as (file, start, size) tuples."""
+ nm = subprocess.Popen(['nm', '-S', binary], stdout=subprocess.PIPE)
+ for line in nm.stdout:
+ parse = ParseNmLine(line)
+ if parse:
+ yield parse
+
+# Regex matching objdump output for the symbols we're interested in.
+# Example line:
+# 12354ab: (disassembly, including <FunctionReference>)
+disassembly_re = re.compile(r'^\s+[0-9a-f]+:.*<(\S+)>')
+def ExtractSymbolReferences(binary, start, end):
+ """Given a span of addresses, returns symbol references from disassembly."""
+ cmd = ['objdump', binary, '--disassemble',
+ '--start-address=0x%x' % start, '--stop-address=0x%x' % end]
+ objdump = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+
+ refs = set()
+ for line in objdump.stdout:
+ if '__static_initialization_and_destruction' in line:
+ raise RuntimeError, ('code mentions '
+ '__static_initialization_and_destruction; '
+ 'did you accidentally run this on a Debug binary?')
+ match = disassembly_re.search(line)
+ if match:
+ (ref,) = match.groups()
+ if ref.startswith('.LC') or ref.startswith('_DYNAMIC'):
+ # Ignore these, they are uninformative.
+ continue
+ if ref.startswith('_GLOBAL__I_'):
+ # Probably a relative jump within this function.
+ continue
+ refs.add(ref)
+
+ return sorted(refs)
+
+def main():
+ parser = optparse.OptionParser(usage='%prog [option] filename')
+ parser.add_option('-d', '--diffable', dest='diffable',
+ action='store_true', default=False,
+ help='Prints the filename on each line, for more easily '
+ 'diff-able output. (Used by sizes.py)')
+ opts, args = parser.parse_args()
+ if len(args) != 1:
+ parser.error('missing filename argument')
+ return 1
+ binary = args[0]
+
+ demangler = Demangler()
+ file_count = 0
+ initializer_count = 0
+
+ files = ParseNm(binary)
+ if opts.diffable:
+ files = sorted(files)
+ for filename, addr, size in files:
+ file_count += 1
+ ref_output = []
+
+ qualified_filename = QualifyFilenameAsProto(filename)
+
+ if size == 2:
+ # gcc generates a two-byte 'repz retq' initializer when there is a
+ # ctor even when the ctor is empty. This is fixed in gcc 4.6, but
+ # Android uses gcc 4.4.
+ ref_output.append('[empty ctor, but it still has cost on gcc <4.6]')
+ else:
+ for ref in ExtractSymbolReferences(binary, addr, addr+size):
+ initializer_count += 1
+
+ ref = demangler.Demangle(ref)
+ if qualified_filename == filename:
+ qualified_filename = QualifyFilename(filename, ref)
+
+ note = ''
+ if ref in NOTES:
+ note = NOTES[ref]
+ elif ref.endswith('_2eproto()'):
+ note = 'protocol compiler bug: crbug.com/105626'
+
+ if note:
+ ref_output.append('%s [%s]' % (ref, note))
+ else:
+ ref_output.append(ref)
+
+ if opts.diffable:
+ if ref_output:
+ print '\n'.join('# ' + qualified_filename + ' ' + r for r in ref_output)
+ else:
+ print '# %s: (empty initializer list)' % qualified_filename
+ else:
+ print '%s (initializer offset 0x%x size 0x%x)' % (qualified_filename,
+ addr, size)
+ print ''.join(' %s\n' % r for r in ref_output)
+
+ if opts.diffable:
+ print '#',
+ print 'Found %d static initializers in %d files.' % (initializer_count,
+ file_count)
+
+ return 0
+
+if '__main__' == __name__:
+ sys.exit(main())
diff --git a/tools/linux/procfs.py b/tools/linux/procfs.py
new file mode 100755
index 0000000..ef19b25
--- /dev/null
+++ b/tools/linux/procfs.py
@@ -0,0 +1,747 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# A Python library to read and store procfs (/proc) information on Linux.
+#
+# Each information storage class in this file keeps the data as close to its
+# original form as reasonably possible. Translation is done only when
+# requested, so that the original data can always be probed.
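+#
+# Minimal usage sketch (assumes a readable /proc/<pid> on a Linux host):
+#
+#   import procfs
+#   maps = procfs.ProcMaps.load(1234)  # returns None if the pid is gone
+#   for entry in maps.iter(procfs.ProcMaps.executable):
+#     print entry.name
+#   status = procfs.ProcStatus.load(1234)
+#   print status.vm_rss  # resident set size in kB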
+
+
+import collections
+import logging
+import os
+import re
+import struct
+import sys
+
+
+class _NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+
+_LOGGER = logging.getLogger('procfs')
+_LOGGER.addHandler(_NullHandler())
+
+
+class ProcStat(object):
+ """Reads and stores information in /proc/pid/stat."""
+ _PATTERN = re.compile(r'^'
+ '(?P<PID>-?[0-9]+) '
+ '\((?P<COMM>.+)\) '
+ '(?P<STATE>[RSDZTW]) '
+ '(?P<PPID>-?[0-9]+) '
+ '(?P<PGRP>-?[0-9]+) '
+ '(?P<SESSION>-?[0-9]+) '
+ '(?P<TTY_NR>-?[0-9]+) '
+ '(?P<TPGID>-?[0-9]+) '
+ '(?P<FLAGS>[0-9]+) '
+ '(?P<MINFIT>[0-9]+) '
+ '(?P<CMINFIT>[0-9]+) '
+ '(?P<MAJFIT>[0-9]+) '
+ '(?P<CMAJFIT>[0-9]+) '
+ '(?P<UTIME>[0-9]+) '
+ '(?P<STIME>[0-9]+) '
+ '(?P<CUTIME>[0-9]+) '
+ '(?P<CSTIME>[0-9]+) '
+ '(?P<PRIORITY>[0-9]+) '
+ '(?P<NICE>[0-9]+) '
+ '(?P<NUM_THREADS>[0-9]+) '
+ '(?P<ITREALVALUE>[0-9]+) '
+ '(?P<STARTTIME>[0-9]+) '
+ '(?P<VSIZE>[0-9]+) '
+ '(?P<RSS>[0-9]+) '
+ '(?P<RSSLIM>[0-9]+) '
+ '(?P<STARTCODE>[0-9]+) '
+ '(?P<ENDCODE>[0-9]+) '
+ '(?P<STARTSTACK>[0-9]+) '
+ '(?P<KSTKESP>[0-9]+) '
+ '(?P<KSTKEIP>[0-9]+) '
+ '(?P<SIGNAL>[0-9]+) '
+ '(?P<BLOCKED>[0-9]+) '
+ '(?P<SIGIGNORE>[0-9]+) '
+ '(?P<SIGCATCH>[0-9]+) '
+ '(?P<WCHAN>[0-9]+) '
+ '(?P<NSWAP>[0-9]+) '
+ '(?P<CNSWAP>[0-9]+) '
+ '(?P<EXIT_SIGNAL>[0-9]+) '
+ '(?P<PROCESSOR>[0-9]+) '
+ '(?P<RT_PRIORITY>[0-9]+) '
+ '(?P<POLICY>[0-9]+) '
+ '(?P<DELAYACCT_BLKIO_TICKS>[0-9]+) '
+ '(?P<GUEST_TIME>[0-9]+) '
+ '(?P<CGUEST_TIME>[0-9]+)', re.IGNORECASE)
+
+ def __init__(self, raw, pid, vsize, rss):
+ self._raw = raw
+ self._pid = pid
+ self._vsize = vsize
+ self._rss = rss
+
+ @staticmethod
+ def load_file(stat_f):
+ raw = stat_f.readlines()
+ stat = ProcStat._PATTERN.match(raw[0])
+ return ProcStat(raw,
+ stat.groupdict().get('PID'),
+ stat.groupdict().get('VSIZE'),
+ stat.groupdict().get('RSS'))
+
+ @staticmethod
+ def load(pid):
+ try:
+ with open(os.path.join('/proc', str(pid), 'stat'), 'r') as stat_f:
+ return ProcStat.load_file(stat_f)
+ except IOError:
+ return None
+
+ @property
+ def raw(self):
+ return self._raw
+
+ @property
+ def pid(self):
+ return int(self._pid)
+
+ @property
+ def vsize(self):
+ return int(self._vsize)
+
+ @property
+ def rss(self):
+ return int(self._rss)
+
+
+class ProcStatm(object):
+ """Reads and stores information in /proc/pid/statm."""
+ _PATTERN = re.compile(r'^'
+ '(?P<SIZE>[0-9]+) '
+ '(?P<RESIDENT>[0-9]+) '
+ '(?P<SHARE>[0-9]+) '
+ '(?P<TEXT>[0-9]+) '
+ '(?P<LIB>[0-9]+) '
+ '(?P<DATA>[0-9]+) '
+ '(?P<DT>[0-9]+)', re.IGNORECASE)
+
+ def __init__(self, raw, size, resident, share, text, lib, data, dt):
+ self._raw = raw
+ self._size = size
+ self._resident = resident
+ self._share = share
+ self._text = text
+ self._lib = lib
+ self._data = data
+ self._dt = dt
+
+ @staticmethod
+ def load_file(statm_f):
+ try:
+ raw = statm_f.readlines()
+ except (IOError, OSError):
+ return None
+ statm = ProcStatm._PATTERN.match(raw[0])
+ return ProcStatm(raw,
+ statm.groupdict().get('SIZE'),
+ statm.groupdict().get('RESIDENT'),
+ statm.groupdict().get('SHARE'),
+ statm.groupdict().get('TEXT'),
+ statm.groupdict().get('LIB'),
+ statm.groupdict().get('DATA'),
+ statm.groupdict().get('DT'))
+
+ @staticmethod
+ def load(pid):
+ try:
+ with open(os.path.join('/proc', str(pid), 'statm'), 'r') as statm_f:
+ return ProcStatm.load_file(statm_f)
+ except (IOError, OSError):
+ return None
+
+ @property
+ def raw(self):
+ return self._raw
+
+ @property
+ def size(self):
+ return int(self._size)
+
+ @property
+ def resident(self):
+ return int(self._resident)
+
+ @property
+ def share(self):
+ return int(self._share)
+
+ @property
+ def text(self):
+ return int(self._text)
+
+ @property
+ def lib(self):
+ return int(self._lib)
+
+ @property
+ def data(self):
+ return int(self._data)
+
+ @property
+ def dt(self):
+ return int(self._dt)
+
+
+class ProcStatus(object):
+ """Reads and stores information in /proc/pid/status."""
+ _PATTERN = re.compile(r'^(?P<NAME>[A-Za-z0-9_]+):\s+(?P<VALUE>.*)')
+
+ def __init__(self, raw, dct):
+ self._raw = raw
+ self._pid = dct.get('Pid')
+ self._name = dct.get('Name')
+ self._vm_peak = dct.get('VmPeak')
+ self._vm_size = dct.get('VmSize')
+ self._vm_lck = dct.get('VmLck')
+ self._vm_pin = dct.get('VmPin')
+ self._vm_hwm = dct.get('VmHWM')
+ self._vm_rss = dct.get('VmRSS')
+ self._vm_data = dct.get('VmData')
+ self._vm_stack = dct.get('VmStk')
+ self._vm_exe = dct.get('VmExe')
+ self._vm_lib = dct.get('VmLib')
+ self._vm_pte = dct.get('VmPTE')
+ self._vm_swap = dct.get('VmSwap')
+
+ @staticmethod
+ def load_file(status_f):
+ raw = status_f.readlines()
+ dct = {}
+ for line in raw:
+ status_match = ProcStatus._PATTERN.match(line)
+ if status_match:
+ match_dict = status_match.groupdict()
+ dct[match_dict['NAME']] = match_dict['VALUE']
+ else:
+ raise SyntaxError('Unknown /proc/pid/status format.')
+ return ProcStatus(raw, dct)
+
+ @staticmethod
+ def load(pid):
+ with open(os.path.join('/proc', str(pid), 'status'), 'r') as status_f:
+ return ProcStatus.load_file(status_f)
+
+ @property
+ def raw(self):
+ return self._raw
+
+ @property
+ def pid(self):
+ return int(self._pid)
+
+ @property
+ def vm_peak(self):
+ """Returns a high-water (peak) virtual memory size in kilo-bytes."""
+ if self._vm_peak.endswith('kB'):
+ return int(self._vm_peak.split()[0])
+ raise ValueError('VmPeak is not in kB.')
+
+ @property
+ def vm_size(self):
+ """Returns a virtual memory size in kilo-bytes."""
+ if self._vm_size.endswith('kB'):
+ return int(self._vm_size.split()[0])
+ raise ValueError('VmSize is not in kB.')
+
+ @property
+ def vm_hwm(self):
+ """Returns a high-water (peak) resident set size (RSS) in kilo-bytes."""
+ if self._vm_hwm.endswith('kB'):
+ return int(self._vm_hwm.split()[0])
+ raise ValueError('VmHWM is not in kB.')
+
+ @property
+ def vm_rss(self):
+ """Returns a resident set size (RSS) in kilo-bytes."""
+ if self._vm_rss.endswith('kB'):
+ return int(self._vm_rss.split()[0])
+ raise ValueError('VmRSS is not in kB.')
+
+
+class ProcMapsEntry(object):
+ """A class representing one line in /proc/pid/maps."""
+
+ def __init__(
+ self, begin, end, readable, writable, executable, private, offset,
+ major, minor, inode, name):
+ self.begin = begin
+ self.end = end
+ self.readable = readable
+ self.writable = writable
+ self.executable = executable
+ self.private = private
+ self.offset = offset
+ self.major = major
+ self.minor = minor
+ self.inode = inode
+ self.name = name
+
+ def as_dict(self):
+ return {
+ 'begin': self.begin,
+ 'end': self.end,
+ 'readable': self.readable,
+ 'writable': self.writable,
+ 'executable': self.executable,
+ 'private': self.private,
+ 'offset': self.offset,
+ 'major': self.major,
+ 'minor': self.minor,
+ 'inode': self.inode,
+ 'name': self.name,
+ }
+
+
+class ProcMaps(object):
+ """Reads and stores information in /proc/pid/maps."""
+
+ MAPS_PATTERN = re.compile(
+ r'^([a-f0-9]+)-([a-f0-9]+)\s+(.)(.)(.)(.)\s+([a-f0-9]+)\s+(\S+):(\S+)\s+'
+ r'(\d+)\s*(.*)$', re.IGNORECASE)
+
+ EXECUTABLE_PATTERN = re.compile(
+ r'\S+\.(so|dll|dylib|bundle)((\.\d+)+\w*(\.\d+){0,3})?')
+
+ def __init__(self):
+ self._sorted_indexes = []
+ self._dictionary = {}
+ self._sorted = True
+
+ def iter(self, condition):
+ if not self._sorted:
+ self._sorted_indexes.sort()
+ self._sorted = True
+ for index in self._sorted_indexes:
+ if not condition or condition(self._dictionary[index]):
+ yield self._dictionary[index]
+
+ def __iter__(self):
+ if not self._sorted:
+ self._sorted_indexes.sort()
+ self._sorted = True
+ for index in self._sorted_indexes:
+ yield self._dictionary[index]
+
+ @staticmethod
+ def load_file(maps_f):
+ table = ProcMaps()
+ for line in maps_f:
+ table.append_line(line)
+ return table
+
+ @staticmethod
+ def load(pid):
+ try:
+ with open(os.path.join('/proc', str(pid), 'maps'), 'r') as maps_f:
+ return ProcMaps.load_file(maps_f)
+ except (IOError, OSError):
+ return None
+
+ def append_line(self, line):
+ entry = self.parse_line(line)
+ if entry:
+ self._append_entry(entry)
+ return entry
+
+ @staticmethod
+ def parse_line(line):
+ matched = ProcMaps.MAPS_PATTERN.match(line)
+ if matched:
+ return ProcMapsEntry( # pylint: disable=W0212
+ int(matched.group(1), 16), # begin
+ int(matched.group(2), 16), # end
+ matched.group(3), # readable
+ matched.group(4), # writable
+ matched.group(5), # executable
+ matched.group(6), # private
+ int(matched.group(7), 16), # offset
+ matched.group(8), # major
+ matched.group(9), # minor
+ int(matched.group(10), 10), # inode
+ matched.group(11) # name
+ )
+ else:
+ return None
+
+ @staticmethod
+ def constants(entry):
+ return entry.writable == '-' and entry.executable == '-'
+
+ @staticmethod
+ def executable(entry):
+ return entry.executable == 'x'
+
+ @staticmethod
+ def executable_and_constants(entry):
+ return ((entry.writable == '-' and entry.executable == '-') or
+ entry.executable == 'x')
+
+ def _append_entry(self, entry):
+ if self._sorted_indexes and self._sorted_indexes[-1] > entry.begin:
+ self._sorted = False
+ self._sorted_indexes.append(entry.begin)
+ self._dictionary[entry.begin] = entry
+
+
+class ProcSmaps(object):
+ """Reads and stores information in /proc/pid/smaps."""
+ _SMAPS_PATTERN = re.compile(r'^(?P<NAME>[A-Za-z0-9_]+):\s+(?P<VALUE>.*)')
+
+ class VMA(object):
+ def __init__(self):
+ self._size = 0
+ self._rss = 0
+ self._pss = 0
+
+ def append(self, name, value):
+ dct = {
+ 'Size': '_size',
+ 'Rss': '_rss',
+ 'Pss': '_pss',
+ 'Referenced': '_referenced',
+ 'Private_Clean': '_private_clean',
+ 'Shared_Clean': '_shared_clean',
+ 'KernelPageSize': '_kernel_page_size',
+ 'MMUPageSize': '_mmu_page_size',
+ }
+ if name in dct:
+ self.__setattr__(dct[name], value)
+
+ @property
+ def size(self):
+ if self._size.endswith('kB'):
+ return int(self._size.split()[0])
+ return int(self._size)
+
+ @property
+ def rss(self):
+ if self._rss.endswith('kB'):
+ return int(self._rss.split()[0])
+ return int(self._rss)
+
+ @property
+ def pss(self):
+ if self._pss.endswith('kB'):
+ return int(self._pss.split()[0])
+ return int(self._pss)
+
+ def __init__(self, raw, total_dct, maps, vma_internals):
+ self._raw = raw
+ self._size = total_dct['Size']
+ self._rss = total_dct['Rss']
+ self._pss = total_dct['Pss']
+ self._referenced = total_dct['Referenced']
+ self._shared_clean = total_dct['Shared_Clean']
+ self._private_clean = total_dct['Private_Clean']
+ self._kernel_page_size = total_dct['KernelPageSize']
+ self._mmu_page_size = total_dct['MMUPageSize']
+ self._maps = maps
+ self._vma_internals = vma_internals
+
+ @staticmethod
+ def load(pid):
+ with open(os.path.join('/proc', str(pid), 'smaps'), 'r') as smaps_f:
+ raw = smaps_f.readlines()
+
+ vma = None
+ vma_internals = collections.OrderedDict()
+ total_dct = collections.defaultdict(int)
+ maps = ProcMaps()
+ for line in raw:
+ maps_match = ProcMaps.MAPS_PATTERN.match(line)
+ if maps_match:
+ vma = maps.append_line(line.strip())
+ vma_internals[vma] = ProcSmaps.VMA()
+ else:
+ smaps_match = ProcSmaps._SMAPS_PATTERN.match(line)
+ if smaps_match:
+ match_dict = smaps_match.groupdict()
+ vma_internals[vma].append(match_dict['NAME'], match_dict['VALUE'])
+ total_dct[match_dict['NAME']] += int(match_dict['VALUE'].split()[0])
+
+ return ProcSmaps(raw, total_dct, maps, vma_internals)
+
+ @property
+ def size(self):
+ return self._size
+
+ @property
+ def rss(self):
+ return self._rss
+
+ @property
+ def referenced(self):
+ return self._referenced
+
+ @property
+ def pss(self):
+ return self._pss
+
+ @property
+ def private_clean(self):
+ return self._private_clean
+
+ @property
+ def shared_clean(self):
+ return self._shared_clean
+
+ @property
+ def kernel_page_size(self):
+ return self._kernel_page_size
+
+ @property
+ def mmu_page_size(self):
+ return self._mmu_page_size
+
+ @property
+ def vma_internals(self):
+ return self._vma_internals
+
+
+class ProcPagemap(object):
+ """Reads and stores partial information in /proc/pid/pagemap.
+
+ It picks up virtual addresses to read based on ProcMaps (/proc/pid/maps).
+ See https://www.kernel.org/doc/Documentation/vm/pagemap.txt for details.
+ """
+ _BYTES_PER_PAGEMAP_VALUE = 8
+ _BYTES_PER_OS_PAGE = 4096
+ _VIRTUAL_TO_PAGEMAP_OFFSET = _BYTES_PER_OS_PAGE / _BYTES_PER_PAGEMAP_VALUE
+
+ _MASK_PRESENT = 1 << 63
+ _MASK_SWAPPED = 1 << 62
+ _MASK_FILEPAGE_OR_SHAREDANON = 1 << 61
+ _MASK_SOFTDIRTY = 1 << 55
+ _MASK_PFN = (1 << 55) - 1
+
+ class VMA(object):
+ def __init__(self, vsize, present, swapped, pageframes):
+ self._vsize = vsize
+ self._present = present
+ self._swapped = swapped
+ self._pageframes = pageframes
+
+ @property
+ def vsize(self):
+ return int(self._vsize)
+
+ @property
+ def present(self):
+ return int(self._present)
+
+ @property
+ def swapped(self):
+ return int(self._swapped)
+
+ @property
+ def pageframes(self):
+ return self._pageframes
+
+ def __init__(self, vsize, present, swapped, vma_internals, in_process_dup):
+ self._vsize = vsize
+ self._present = present
+ self._swapped = swapped
+ self._vma_internals = vma_internals
+ self._in_process_dup = in_process_dup
+
+ @staticmethod
+ def load(pid, maps):
+ total_present = 0
+ total_swapped = 0
+ total_vsize = 0
+ in_process_dup = 0
+ vma_internals = collections.OrderedDict()
+ process_pageframe_set = set()
+
+ try:
+ pagemap_fd = os.open(
+ os.path.join('/proc', str(pid), 'pagemap'), os.O_RDONLY)
+ except (IOError, OSError):
+ return None
+ for vma in maps:
+ present = 0
+ swapped = 0
+ vsize = 0
+ pageframes = collections.defaultdict(int)
+ begin_offset = ProcPagemap._offset(vma.begin)
+ chunk_size = ProcPagemap._offset(vma.end) - begin_offset
+ try:
+ os.lseek(pagemap_fd, begin_offset, os.SEEK_SET)
+ buf = os.read(pagemap_fd, chunk_size)
+ except (IOError, OSError):
+ return None
+ if len(buf) < chunk_size:
+ _LOGGER.warn('Failed to read pagemap at 0x%x in %d.' % (vma.begin, pid))
+ pagemap_values = struct.unpack(
+ '=%dQ' % (len(buf) / ProcPagemap._BYTES_PER_PAGEMAP_VALUE), buf)
+ for pagemap_value in pagemap_values:
+ vsize += ProcPagemap._BYTES_PER_OS_PAGE
+ if pagemap_value & ProcPagemap._MASK_PRESENT:
+ if (pagemap_value & ProcPagemap._MASK_PFN) in process_pageframe_set:
+ in_process_dup += ProcPagemap._BYTES_PER_OS_PAGE
+ else:
+ process_pageframe_set.add(pagemap_value & ProcPagemap._MASK_PFN)
+ if (pagemap_value & ProcPagemap._MASK_PFN) not in pageframes:
+ present += ProcPagemap._BYTES_PER_OS_PAGE
+ pageframes[pagemap_value & ProcPagemap._MASK_PFN] += 1
+ if pagemap_value & ProcPagemap._MASK_SWAPPED:
+ swapped += ProcPagemap._BYTES_PER_OS_PAGE
+ vma_internals[vma] = ProcPagemap.VMA(vsize, present, swapped, pageframes)
+ total_present += present
+ total_swapped += swapped
+ total_vsize += vsize
+ try:
+ os.close(pagemap_fd)
+ except OSError:
+ return None
+
+ return ProcPagemap(total_vsize, total_present, total_swapped,
+ vma_internals, in_process_dup)
+
+ @staticmethod
+ def _offset(virtual_address):
+ return virtual_address / ProcPagemap._VIRTUAL_TO_PAGEMAP_OFFSET
+
+ @property
+ def vsize(self):
+ return int(self._vsize)
+
+ @property
+ def present(self):
+ return int(self._present)
+
+ @property
+ def swapped(self):
+ return int(self._swapped)
+
+ @property
+ def vma_internals(self):
+ return self._vma_internals
+
+
+class _ProcessMemory(object):
+ """Aggregates process memory information from /proc for manual testing."""
+ def __init__(self, pid):
+ self._pid = pid
+ self._maps = None
+ self._pagemap = None
+ self._stat = None
+ self._status = None
+ self._statm = None
+ self._smaps = []
+
+ def _read(self, proc_file):
+ lines = []
+ with open(os.path.join('/proc', str(self._pid), proc_file), 'r') as proc_f:
+ lines = proc_f.readlines()
+ return lines
+
+ def read_all(self):
+ self.read_stat()
+ self.read_statm()
+ self.read_status()
+ self.read_smaps()
+ self.read_maps()
+ self.read_pagemap(self._maps)
+
+ def read_maps(self):
+ self._maps = ProcMaps.load(self._pid)
+
+ def read_pagemap(self, maps):
+ self._pagemap = ProcPagemap.load(self._pid, maps)
+
+ def read_smaps(self):
+ self._smaps = ProcSmaps.load(self._pid)
+
+ def read_stat(self):
+ self._stat = ProcStat.load(self._pid)
+
+ def read_statm(self):
+ self._statm = ProcStatm.load(self._pid)
+
+ def read_status(self):
+ self._status = ProcStatus.load(self._pid)
+
+ @property
+ def pid(self):
+ return self._pid
+
+ @property
+ def maps(self):
+ return self._maps
+
+ @property
+ def pagemap(self):
+ return self._pagemap
+
+ @property
+ def smaps(self):
+ return self._smaps
+
+ @property
+ def stat(self):
+ return self._stat
+
+ @property
+ def statm(self):
+ return self._statm
+
+ @property
+ def status(self):
+ return self._status
+
+
+def main(argv):
+ """The main function for manual testing."""
+ _LOGGER.setLevel(logging.WARNING)
+ handler = logging.StreamHandler()
+ handler.setLevel(logging.WARNING)
+ handler.setFormatter(logging.Formatter(
+ '%(asctime)s:%(name)s:%(levelname)s:%(message)s'))
+ _LOGGER.addHandler(handler)
+
+ pids = []
+ for arg in argv[1:]:
+ try:
+ pid = int(arg)
+ except ValueError:
+ raise SyntaxError("%s is not an integer." % arg)
+ else:
+ pids.append(pid)
+
+ procs = {}
+ for pid in pids:
+ procs[pid] = _ProcessMemory(pid)
+ procs[pid].read_all()
+
+ print '=== PID: %d ===' % pid
+
+ print ' stat: %d' % procs[pid].stat.vsize
+ print ' statm: %d' % (procs[pid].statm.size * 4096)
+ print ' status: %d (Peak:%d)' % (procs[pid].status.vm_size * 1024,
+ procs[pid].status.vm_peak * 1024)
+ print ' smaps: %d' % (procs[pid].smaps.size * 1024)
+ print 'pagemap: %d' % procs[pid].pagemap.vsize
+ print ' stat: %d' % (procs[pid].stat.rss * 4096)
+ print ' statm: %d' % (procs[pid].statm.resident * 4096)
+ print ' status: %d (Peak:%d)' % (procs[pid].status.vm_rss * 1024,
+ procs[pid].status.vm_hwm * 1024)
+ print ' smaps: %d' % (procs[pid].smaps.rss * 1024)
+ print 'pagemap: %d' % procs[pid].pagemap.present
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/tools/linux/tests/procfs_tests.py b/tools/linux/tests/procfs_tests.py
new file mode 100755
index 0000000..c829199
--- /dev/null
+++ b/tools/linux/tests/procfs_tests.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import cStringIO
+import logging
+import os
+import sys
+import unittest
+
+ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.insert(0, ROOT_DIR)
+
+from procfs import ProcMaps
+
+
+class ProcMapsTest(unittest.TestCase):
+ _TEST_PROCMAPS = '\n'.join([
+ '00000000-00001000 r--p 00000000 fc:00 0',
+ '0080b000-0080c000 r-xp 0020b000 fc:00 2231329'
+ ' /usr/bin/some',
+ '0080c000-0080f000 ---p 0020c000 fc:00 2231329'
+ ' /usr/bin/some',
+ '0100a000-0100c000 r-xp 0120a000 fc:00 22381'
+ ' /usr/bin/chrome',
+ '0100c000-0100f000 ---p 0120c000 fc:00 22381'
+ ' /usr/bin/chrome',
+ '0237d000-02a9b000 rw-p 00000000 00:00 0'
+ ' [heap]',
+ '7fb920e6d000-7fb920e85000 r-xp 00000000 fc:00 263482'
+ ' /lib/x86_64-linux-gnu/libpthread-2.15.so',
+ '7fb920e85000-7fb921084000 ---p 00018000 fc:00 263482'
+ ' /lib/x86_64-linux-gnu/libpthread-2.15.so',
+ '7fb9225f4000-7fb922654000 rw-s 00000000 00:04 19660808'
+ ' /SYSV00000000 (deleted)',
+ 'ffffffffff600000-ffffffffff601000 r-xp 00000000 00:00 0'
+ ' [vsyscall]',
+ ])
+
+ _EXPECTED = [
+ (0x0, 0x1000, 'r', '-', '-', 'p', 0x0, 'fc', '00', 0, ''),
+ (0x80b000, 0x80c000, 'r', '-', 'x', 'p', 0x20b000,
+ 'fc', '00', 2231329, '/usr/bin/some'),
+ (0x80c000, 0x80f000, '-', '-', '-', 'p', 0x20c000,
+ 'fc', '00', 2231329, '/usr/bin/some'),
+ (0x100a000, 0x100c000, 'r', '-', 'x', 'p', 0x120a000,
+ 'fc', '00', 22381, '/usr/bin/chrome'),
+ (0x100c000, 0x100f000, '-', '-', '-', 'p', 0x120c000,
+ 'fc', '00', 22381, '/usr/bin/chrome'),
+ (0x237d000, 0x2a9b000, 'r', 'w', '-', 'p', 0x0,
+ '00', '00', 0, '[heap]'),
+ (0x7fb920e6d000, 0x7fb920e85000, 'r', '-', 'x', 'p', 0x0,
+ 'fc', '00', 263482, '/lib/x86_64-linux-gnu/libpthread-2.15.so'),
+ (0x7fb920e85000, 0x7fb921084000, '-', '-', '-', 'p', 0x18000,
+ 'fc', '00', 263482, '/lib/x86_64-linux-gnu/libpthread-2.15.so'),
+ (0x7fb9225f4000, 0x7fb922654000, 'r', 'w', '-', 's', 0x0,
+ '00', '04', 19660808, '/SYSV00000000 (deleted)'),
+ (0xffffffffff600000, 0xffffffffff601000, 'r', '-', 'x', 'p', 0x0,
+ '00', '00', 0, '[vsyscall]'),
+ ]
+
+ @staticmethod
+ def _expected_as_dict(index):
+ return {
+ 'begin': ProcMapsTest._EXPECTED[index][0],
+ 'end': ProcMapsTest._EXPECTED[index][1],
+ 'readable': ProcMapsTest._EXPECTED[index][2],
+ 'writable': ProcMapsTest._EXPECTED[index][3],
+ 'executable': ProcMapsTest._EXPECTED[index][4],
+ 'private': ProcMapsTest._EXPECTED[index][5],
+ 'offset': ProcMapsTest._EXPECTED[index][6],
+ 'major': ProcMapsTest._EXPECTED[index][7],
+ 'minor': ProcMapsTest._EXPECTED[index][8],
+ 'inode': ProcMapsTest._EXPECTED[index][9],
+ 'name': ProcMapsTest._EXPECTED[index][10],
+ }
+
+ def test_load(self):
+ maps = ProcMaps.load_file(cStringIO.StringIO(self._TEST_PROCMAPS))
+ for index, entry in enumerate(maps):
+ self.assertEqual(entry.as_dict(), self._expected_as_dict(index))
+
+ def test_constants(self):
+ maps = ProcMaps.load_file(cStringIO.StringIO(self._TEST_PROCMAPS))
+ selected = [0, 2, 4, 7]
+ for index, entry in enumerate(maps.iter(ProcMaps.constants)):
+ self.assertEqual(entry.as_dict(),
+ self._expected_as_dict(selected[index]))
+
+ def test_executable(self):
+ maps = ProcMaps.load_file(cStringIO.StringIO(self._TEST_PROCMAPS))
+ selected = [1, 3, 6, 9]
+ for index, entry in enumerate(maps.iter(ProcMaps.executable)):
+ self.assertEqual(entry.as_dict(),
+ self._expected_as_dict(selected[index]))
+
+ def test_executable_and_constants(self):
+ maps = ProcMaps.load_file(cStringIO.StringIO(self._TEST_PROCMAPS))
+ selected = [0, 1, 2, 3, 4, 6, 7, 9]
+ for index, entry in enumerate(maps.iter(ProcMaps.executable_and_constants)):
+ self.assertEqual(entry.as_dict(),
+ self._expected_as_dict(selected[index]))
+
+
+if __name__ == '__main__':
+ logging.basicConfig(
+ level=logging.DEBUG if '-v' in sys.argv else logging.ERROR,
+ format='%(levelname)5s %(filename)15s(%(lineno)3d): %(message)s')
+ unittest.main()
diff --git a/tools/lsan/PRESUBMIT.py b/tools/lsan/PRESUBMIT.py
new file mode 100644
index 0000000..e7891ff
--- /dev/null
+++ b/tools/lsan/PRESUBMIT.py
@@ -0,0 +1,43 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into gcl.
+"""
+
+import re
+
+def CheckChange(input_api, output_api):
+ errors = []
+
+ for f in input_api.AffectedFiles():
+ if not f.LocalPath().endswith('suppressions.txt'):
+ continue
+    for line_num, line in enumerate(f.NewContents(), start=1):
+ line = line.strip()
+ if line.startswith('#') or not line:
+ continue
+ if not line.startswith('leak:'):
+ errors.append('"%s" should be "leak:..." in %s line %d' %
+ (line, f.LocalPath(), line_num))
+ if errors:
+ return [output_api.PresubmitError('\n'.join(errors))]
+ return []
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+def GetPreferredTryMasters(project, change):
+ return {
+ 'tryserver.chromium.linux': {
+ 'linux_asan': set(['compile']),
+ },
+ 'tryserver.chromium.mac': {
+ 'mac_asan': set(['compile']),
+ }
+ }
diff --git a/tools/lsan/suppressions.txt b/tools/lsan/suppressions.txt
new file mode 100644
index 0000000..493ebfc
--- /dev/null
+++ b/tools/lsan/suppressions.txt
@@ -0,0 +1,90 @@
+# HeapCheck sanity test
+leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody
+
+#### Third-party leaks ####
+
+# False positives in libfontconfig. http://crbug.com/39050
+leak:libfontconfig
+
+# Leaks in Nvidia's libGL.
+leak:libGL.so
+
+# A small string is leaked here (57 bytes per process). http://crbug.com/46571#c9
+leak:blink::V8GCController::collectGarbage
+
+# http://crbug.com/270180
+leak:net::ProxyResolverV8::Context::ResolveProxy
+
+# NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
+leak:net::NSSCertDatabase::ImportFromPKCS12
+leak:net::NSSCertDatabase::ListCerts
+leak:net::NSSCertDatabase::DeleteCertAndKey
+leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB
+# Another leak due to not shutting down NSS properly. http://crbug.com/124445
+leak:error_get_my_stack
+
+# XRandR has several one time leaks.
+leak:libxrandr
+
+# The NSS suppressions above will not fire when the fast stack unwinder is used,
+# because it can't unwind through NSS libraries. Apply blanket suppressions for
+# now.
+leak:libnssutil3
+leak:libnspr4
+leak:libnss3
+leak:libplds4
+leak:libnssckbi
+
+# Skia leaks GrGpuGL::ProgramCache::Entry. http://crbug.com/262934
+leak:GrGpuGL::flushGraphicsState
+
+# xrandr leak. http://crbug.com/119677
+leak:XRRFindDisplay
+
+# V8 may leak this by design in unit tests. http://crbug.com/323149
+leak:v8::internal::Genesis::Genesis
+
+# Suppressions for objects which can be owned by the V8 heap. This is a
+# temporary workaround until LeakSanitizer supports the V8 heap.
+# Those should only fire in (browser)tests. If you see one of them in Chrome,
+# then it's a real leak.
+# http://crbug.com/328552
+leak:WTF::StringImpl::createUninitialized
+leak:WTF::StringImpl::create8BitIfPossible
+leak:blink::MouseEvent::create
+leak:blink::WindowProxy::initializeIfNeeded
+leak:blink::*::*GetterCallback
+leak:blink::CSSComputedStyleDeclaration::create
+leak:blink::V8PerIsolateData::ensureDomInJSContext
+leak:gin/object_template_builder.h
+leak:gin::internal::Dispatcher
+
+# http://crbug.com/356785
+leak:content::RenderViewImplTest_DecideNavigationPolicyForWebUI_Test::TestBody
+
+#### Actual bugs in Chromium code ####
+# PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+# Instead, commits that introduce memory leaks should be reverted. Suppressing
+# the leak is acceptable in some cases when reverting is impossible, i.e. when
+# enabling leak detection for the first time for a test target with pre-existing
+# leaks.
+
+# Small test-only leak in ppapi_unittests. http://crbug.com/258113
+leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_Test
+
+# http://crbug.com/318221
+leak:base::EnsureProcessTerminated
+
+# http://crbug.com/322671
+leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread
+
+# http://crbug.com/355641
+leak:TrayAccessibilityTest
+
+# http://crbug.com/354644
+leak:CertificateViewerUITest::ShowModalCertificateViewer
+
+# http://crbug.com/356306
+leak:content::SetProcessTitleFromCommandLine
+
+# PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
diff --git a/tools/msan/blacklist.txt b/tools/msan/blacklist.txt
new file mode 100644
index 0000000..1a487c7
--- /dev/null
+++ b/tools/msan/blacklist.txt
@@ -0,0 +1,25 @@
+# The rules in this file are only applied at compile time.
+# Because the Chrome buildsystem does not automatically touch the files
+# mentioned here, changing this file requires clobbering all MSan bots.
+#
+# Please think twice before you add or remove these rules.
+
+# False positive in ffmpeg due to assembly code. http://crbug.com/344505
+fun:ff_get_cpu_flags_x86
+
+# Uninit in zlib. http://crbug.com/116277
+fun:*MOZ_Z_deflate*
+
+# Uninit in OSMesa. http://crbug.com/347967
+fun:unpack_RGBA8888
+fun:unpack_RGB888
+
+# An actual Chromium bug which is blocking our progress. http://crbug.com/334448
+fun:*UTF8GenericReplaceInternal*
+
+# False positives due to use of linux_syscall_support. http://crbug.com/394028
+src:*/breakpad/src/*
+src:*/components/crash/app/breakpad_linux.cc
+
+# False positives due to an MSan bug. http://crbug.com/418986
+fun:*SchedGetParamThread*
diff --git a/tools/protoc_wrapper/protoc_wrapper.py b/tools/protoc_wrapper/protoc_wrapper.py
new file mode 100755
index 0000000..e6ddf95
--- /dev/null
+++ b/tools/protoc_wrapper/protoc_wrapper.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A simple wrapper for protoc.
+
+- Adds includes in generated headers.
+- Handles building with system protobuf as an option.
+"""
+
+import fnmatch
+import optparse
+import os.path
+import shutil
+import subprocess
+import sys
+import tempfile
+
+PROTOC_INCLUDE_POINT = '// @@protoc_insertion_point(includes)\n'
+
+def ModifyHeader(header_file, extra_header):
+ """Adds |extra_header| to |header_file|. Returns 0 on success.
+
+ |extra_header| is the name of the header file to include.
+ |header_file| is a generated protobuf cpp header.
+ """
+ include_point_found = False
+ header_contents = []
+ with open(header_file) as f:
+ for line in f:
+ header_contents.append(line)
+ if line == PROTOC_INCLUDE_POINT:
+ extra_header_msg = '#include "%s"\n' % extra_header
+ header_contents.append(extra_header_msg)
+        include_point_found = True
+ if not include_point_found:
+ return 1
+
+ with open(header_file, 'wb') as f:
+ f.write(''.join(header_contents))
+ return 0
+
+def ScanForBadFiles(scan_root):
+ """Scan for bad file names, see http://crbug.com/386125 for details.
+ Returns True if any filenames are bad. Outputs errors to stderr.
+
+ |scan_root| is the path to the directory to be recursively scanned.
+ """
+ badname = False
+ real_scan_root = os.path.realpath(scan_root)
+ for dirpath, dirnames, filenames in os.walk(real_scan_root):
+ matches = fnmatch.filter(filenames, '*-*.proto')
+ if len(matches) > 0:
+ if not badname:
+ badname = True
+ sys.stderr.write('proto files must not have hyphens in their names ('
+ 'see http://crbug.com/386125 for more information):\n')
+ for filename in matches:
+ sys.stderr.write(' ' + os.path.join(real_scan_root,
+ dirpath, filename) + '\n')
+ return badname
+
+
+def RewriteProtoFilesForSystemProtobuf(path):
+ wrapper_dir = tempfile.mkdtemp()
+ try:
+ for filename in os.listdir(path):
+ if not filename.endswith('.proto'):
+ continue
+ with open(os.path.join(path, filename), 'r') as src_file:
+ with open(os.path.join(wrapper_dir, filename), 'w') as dst_file:
+ for line in src_file:
+ # Remove lines that break build with system protobuf.
+ # We cannot optimize for lite runtime, because system lite runtime
+ # does not have a Chromium-specific hack to retain unknown fields.
+          # Similarly, it does not understand the corresponding option that
+          # controls the usage of that hack.
+ if 'LITE_RUNTIME' in line or 'retain_unknown_fields' in line:
+ continue
+ dst_file.write(line)
+
+ return wrapper_dir
+ except:
+ shutil.rmtree(wrapper_dir)
+ raise
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option('--include', dest='extra_header',
+ help='The extra header to include. This must be specified '
+ 'along with --protobuf.')
+ parser.add_option('--protobuf', dest='generated_header',
+ help='The c++ protobuf header to add the extra header to. '
+ 'This must be specified along with --include.')
+ parser.add_option('--proto-in-dir',
+ help='The directory containing .proto files.')
+ parser.add_option('--proto-in-file', help='Input file to compile.')
+ parser.add_option('--use-system-protobuf', type=int, default=0,
+ help='Option to use system-installed protobuf '
+ 'instead of bundled one.')
+ (options, args) = parser.parse_args(sys.argv)
+ if len(args) < 2:
+ return 1
+
+ if ScanForBadFiles(options.proto_in_dir):
+ return 1
+
+ proto_path = options.proto_in_dir
+ if options.use_system_protobuf == 1:
+ proto_path = RewriteProtoFilesForSystemProtobuf(proto_path)
+ try:
+ # Run what is hopefully protoc.
+ protoc_args = args[1:]
+ protoc_args += ['--proto_path=%s' % proto_path,
+ os.path.join(proto_path, options.proto_in_file)]
+ ret = subprocess.call(protoc_args)
+ if ret != 0:
+ return ret
+ finally:
+ if options.use_system_protobuf == 1:
+ # Remove temporary directory holding re-written files.
+ shutil.rmtree(proto_path)
+
+ # protoc succeeded, check to see if the generated cpp header needs editing.
+ if not options.extra_header or not options.generated_header:
+ return 0
+ return ModifyHeader(options.generated_header, options.extra_header)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/tools/remove_stale_pyc_files.py b/tools/remove_stale_pyc_files.py
new file mode 100755
index 0000000..b32c5f4
--- /dev/null
+++ b/tools/remove_stale_pyc_files.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+def RemoveAllStalePycFiles(base_dir):
+ """Scan directories for old .pyc files without a .py file and delete them."""
+ for dirname, _, filenames in os.walk(base_dir):
+ if '.svn' in dirname or '.git' in dirname:
+ continue
+ for filename in filenames:
+ root, ext = os.path.splitext(filename)
+ if ext != '.pyc':
+ continue
+
+ pyc_path = os.path.join(dirname, filename)
+ py_path = os.path.join(dirname, root + '.py')
+
+ try:
+ if not os.path.exists(py_path):
+ os.remove(pyc_path)
+ except OSError:
+ # Wrap OS calls in try/except in case another process touched this file.
+ pass
+
+ try:
+ os.removedirs(dirname)
+ except OSError:
+ # Wrap OS calls in try/except in case another process touched this dir.
+ pass
+
+
+if __name__ == '__main__':
+ for path in sys.argv[1:]:
+ RemoveAllStalePycFiles(path)
diff --git a/tools/sort-headers.py b/tools/sort-headers.py
new file mode 100755
index 0000000..3f3435d
--- /dev/null
+++ b/tools/sort-headers.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Given a filename as an argument, sort the #include/#imports in that file.
+
+Shows a diff and prompts for confirmation before doing the deed.
+Works great with tools/git/for-all-touched-files.py.
+"""
+
+import optparse
+import os
+import sys
+
+
+def YesNo(prompt):
+ """Prompts with a yes/no question, returns True if yes."""
+ print prompt,
+ sys.stdout.flush()
+ # http://code.activestate.com/recipes/134892/
+ if sys.platform == 'win32':
+ import msvcrt
+ ch = msvcrt.getch()
+ else:
+ import termios
+ import tty
+ fd = sys.stdin.fileno()
+ old_settings = termios.tcgetattr(fd)
+ ch = 'n'
+ try:
+ tty.setraw(sys.stdin.fileno())
+ ch = sys.stdin.read(1)
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ print ch
+ return ch in ('Y', 'y')
+
+
+def IncludeCompareKey(line):
+ """Sorting comparator key used for comparing two #include lines.
+ Returns the filename without the #include/#import/import prefix.
+ """
+ for prefix in ('#include ', '#import ', 'import '):
+ if line.startswith(prefix):
+ line = line[len(prefix):]
+ break
+
+ # The win32 api has all sorts of implicit include order dependencies :-/
+ # Give a few headers special sort keys that make sure they appear before all
+ # other headers.
+ if line.startswith('<windows.h>'): # Must be before e.g. shellapi.h
+ return '0'
+ if line.startswith('<atlbase.h>'): # Must be before atlapp.h.
+ return '1' + line
+ if line.startswith('<ole2.h>'): # Must be before e.g. intshcut.h
+ return '1' + line
+ if line.startswith('<unknwn.h>'): # Must be before e.g. intshcut.h
+ return '1' + line
+
+ # C++ system headers should come after C system headers.
+ if line.startswith('<'):
+ if line.find('.h>') != -1:
+ return '2' + line.lower()
+ else:
+ return '3' + line.lower()
+
+ return '4' + line
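+
+# For illustration (hypothetical input): a block containing
+#   #include <vector>
+#   #include <stdio.h>
+#   #include "base/foo.h"
+#   #include <windows.h>
+# gets the keys '0', '2<stdio.h>', '3<vector>' and '4"base/foo.h"', so it is
+# rewritten with <windows.h> first, then C system headers, then C++ system
+# headers, then everything else.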
+
+
+def IsInclude(line):
+ """Returns True if the line is an #include/#import/import line."""
+ return any([line.startswith('#include '), line.startswith('#import '),
+ line.startswith('import ')])
+
+
+def SortHeader(infile, outfile):
+ """Sorts the headers in infile, writing the sorted file to outfile."""
+ for line in infile:
+ if IsInclude(line):
+ headerblock = []
+ while IsInclude(line):
+ infile_ended_on_include_line = False
+ headerblock.append(line)
+ # Ensure we don't die due to trying to read beyond the end of the file.
+ try:
+ line = infile.next()
+ except StopIteration:
+ infile_ended_on_include_line = True
+ break
+ for header in sorted(headerblock, key=IncludeCompareKey):
+ outfile.write(header)
+ if infile_ended_on_include_line:
+ # We already wrote the last line above; exit to ensure it isn't written
+ # again.
+ return
+ # Intentionally fall through, to write the line that caused
+ # the above while loop to exit.
+ outfile.write(line)
+
+
+def FixFileWithConfirmFunction(filename, confirm_function,
+ perform_safety_checks):
+ """Creates a fixed version of the file, invokes |confirm_function|
+ to decide whether to use the new file, and cleans up.
+
+ |confirm_function| takes two parameters, the original filename and
+ the fixed-up filename, and returns True to use the fixed-up file,
+  False to not use it.
+
+ If |perform_safety_checks| is True, then the function checks whether it is
+ unsafe to reorder headers in this file and skips the reorder with a warning
+ message in that case.
+ """
+ if perform_safety_checks and IsUnsafeToReorderHeaders(filename):
+ print ('Not reordering headers in %s as the script thinks that the '
+ 'order of headers in this file is semantically significant.'
+ % (filename))
+ return
+ fixfilename = filename + '.new'
+ infile = open(filename, 'rb')
+ outfile = open(fixfilename, 'wb')
+ SortHeader(infile, outfile)
+ infile.close()
+ outfile.close() # Important so the below diff gets the updated contents.
+
+ try:
+ if confirm_function(filename, fixfilename):
+ if sys.platform == 'win32':
+ os.unlink(filename)
+ os.rename(fixfilename, filename)
+ finally:
+ try:
+ os.remove(fixfilename)
+ except OSError:
+ # If the file isn't there, we don't care.
+ pass
+
+
+def DiffAndConfirm(filename, should_confirm, perform_safety_checks):
+ """Shows a diff of what the tool would change the file named
+ filename to. Shows a confirmation prompt if should_confirm is true.
+ Saves the resulting file if should_confirm is false or the user
+ answers Y to the confirmation prompt.
+ """
+ def ConfirmFunction(filename, fixfilename):
+ diff = os.system('diff -u %s %s' % (filename, fixfilename))
+ if sys.platform != 'win32':
+ diff >>= 8
+ if diff == 0: # Check exit code.
+ print '%s: no change' % filename
+ return False
+
+ return (not should_confirm or YesNo('Use new file (y/N)?'))
+
+ FixFileWithConfirmFunction(filename, ConfirmFunction, perform_safety_checks)
+
+def IsUnsafeToReorderHeaders(filename):
+ # *_message_generator.cc is almost certainly a file that generates IPC
+ # definitions. Changes in include order in these files can result in them not
+ # building correctly.
+ if filename.find("message_generator.cc") != -1:
+ return True
+ return False
+
+def main():
+ parser = optparse.OptionParser(usage='%prog filename1 filename2 ...')
+ parser.add_option('-f', '--force', action='store_false', default=True,
+ dest='should_confirm',
+ help='Turn off confirmation prompt.')
+ parser.add_option('--no_safety_checks',
+ action='store_false', default=True,
+ dest='perform_safety_checks',
+ help='Do not perform the safety checks via which this '
+ 'script refuses to operate on files for which it thinks '
+ 'the include ordering is semantically significant.')
+ opts, filenames = parser.parse_args()
+
+ if len(filenames) < 1:
+ parser.print_help()
+ return 1
+
+ for filename in filenames:
+ DiffAndConfirm(filename, opts.should_confirm, opts.perform_safety_checks)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/tools/valgrind/OWNERS b/tools/valgrind/OWNERS
new file mode 100644
index 0000000..73ce47c
--- /dev/null
+++ b/tools/valgrind/OWNERS
@@ -0,0 +1,5 @@
+set noparent
+bruening@chromium.org
+glider@chromium.org
+thestig@chromium.org
+timurrrr@chromium.org
diff --git a/tools/valgrind/README b/tools/valgrind/README
new file mode 100644
index 0000000..ad26439
--- /dev/null
+++ b/tools/valgrind/README
@@ -0,0 +1,4 @@
+Historically this directory has been a home for Valgrind and ThreadSanitizer.
+Since then, other memory tools used in Chromium have started squatting here and
+the name became confusing.
+We're replacing tools/valgrind with tools/memory/; new tools should go there.
diff --git a/tools/valgrind/asan/asan_symbolize.py b/tools/valgrind/asan/asan_symbolize.py
new file mode 100755
index 0000000..ba8d698
--- /dev/null
+++ b/tools/valgrind/asan/asan_symbolize.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from third_party import asan_symbolize
+
+import os
+import sys
+
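+# Typical use (a sketch): pipe an ASan report through this script, e.g.
+#   out/Release/browser_tests 2>&1 | tools/valgrind/asan/asan_symbolize.py
+# Any extra arguments are path prefixes to strip from reported file names.
+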
+class LineBuffered(object):
+ """Disable buffering on a file object."""
+ def __init__(self, stream):
+ self.stream = stream
+
+ def write(self, data):
+ self.stream.write(data)
+ if '\n' in data:
+ self.stream.flush()
+
+ def __getattr__(self, attr):
+ return getattr(self.stream, attr)
+
+
+def disable_buffering():
+ """Makes this process and child processes stdout unbuffered."""
+ if not os.environ.get('PYTHONUNBUFFERED'):
+ # Since sys.stdout is a C++ object, it's impossible to do
+ # sys.stdout.write = lambda...
+ sys.stdout = LineBuffered(sys.stdout)
+ os.environ['PYTHONUNBUFFERED'] = 'x'
+
+
+def set_symbolizer_path():
+ """Set the path to the llvm-symbolize binary in the Chromium source tree."""
+ if not os.environ.get('LLVM_SYMBOLIZER_PATH'):
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+ # Assume this script resides three levels below src/ (i.e.
+ # src/tools/valgrind/asan/).
+ src_root = os.path.join(script_dir, "..", "..", "..")
+ symbolizer_path = os.path.join(src_root, 'third_party',
+ 'llvm-build', 'Release+Asserts', 'bin', 'llvm-symbolizer')
+ assert(os.path.isfile(symbolizer_path))
+ os.environ['LLVM_SYMBOLIZER_PATH'] = os.path.abspath(symbolizer_path)
+
+
+def main():
+ disable_buffering()
+ set_symbolizer_path()
+ asan_symbolize.demangle = True
+ asan_symbolize.fix_filename_patterns = sys.argv[1:]
+ asan_symbolize.logfile = sys.stdin
+ loop = asan_symbolize.SymbolizationLoop()
+ loop.process_logfile()
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/valgrind/asan/third_party/README.chromium b/tools/valgrind/asan/third_party/README.chromium
new file mode 100644
index 0000000..62f5058
--- /dev/null
+++ b/tools/valgrind/asan/third_party/README.chromium
@@ -0,0 +1,6 @@
+Name: asan_symbolize.py
+License: University of Illinois Open Source License.
+Version: 218072
+URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/scripts/asan_symbolize.py?view=co&content-type=text%2Fplain
+
+asan_symbolize.py is a verbatim copy of asan_symbolize.py in the LLVM trunk.
diff --git a/tools/valgrind/asan/third_party/__init__.py b/tools/valgrind/asan/third_party/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/valgrind/asan/third_party/__init__.py
diff --git a/tools/valgrind/asan/third_party/asan_symbolize.py b/tools/valgrind/asan/third_party/asan_symbolize.py
new file mode 100755
index 0000000..76de60a
--- /dev/null
+++ b/tools/valgrind/asan/third_party/asan_symbolize.py
@@ -0,0 +1,441 @@
+#!/usr/bin/env python
+#===- lib/asan/scripts/asan_symbolize.py -----------------------------------===#
+#
+# The LLVM Compiler Infrastructure
+#
+# This file is distributed under the University of Illinois Open Source
+# License. See LICENSE.TXT for details.
+#
+#===------------------------------------------------------------------------===#
+import argparse
+import bisect
+import getopt
+import os
+import pty
+import re
+import subprocess
+import sys
+import termios
+
+symbolizers = {}
+DEBUG = False
+demangle = False
+binutils_prefix = None
+sysroot_path = None
+binary_name_filter = None
+fix_filename_patterns = None
+logfile = sys.stdin
+
+# FIXME: merge the code that calls fix_filename().
+def fix_filename(file_name):
+ if fix_filename_patterns:
+ for path_to_cut in fix_filename_patterns:
+ file_name = re.sub('.*' + path_to_cut, '', file_name)
+ file_name = re.sub('.*asan_[a-z_]*.cc:[0-9]*', '_asan_rtl_', file_name)
+ file_name = re.sub('.*crtstuff.c:0', '???:0', file_name)
+ return file_name
+
+def sysroot_path_filter(binary_name):
+ return sysroot_path + binary_name
+
+def guess_arch(addr):
+ # Guess which arch we're running. 10 = len('0x') + 8 hex digits.
+ if len(addr) > 10:
+ return 'x86_64'
+ else:
+ return 'i386'
+
+class Symbolizer(object):
+ def __init__(self):
+ pass
+
+ def symbolize(self, addr, binary, offset):
+ """Symbolize the given address (pair of binary and offset).
+
+    Overridden in subclasses.
+ Args:
+ addr: virtual address of an instruction.
+ binary: path to executable/shared object containing this instruction.
+ offset: instruction offset in the @binary.
+ Returns:
+ list of strings (one string for each inlined frame) describing
+ the code locations for this instruction (that is, function name, file
+ name, line and column numbers).
+ """
+ return None
+
+
+class LLVMSymbolizer(Symbolizer):
+ def __init__(self, symbolizer_path, addr):
+ super(LLVMSymbolizer, self).__init__()
+ self.symbolizer_path = symbolizer_path
+ self.default_arch = guess_arch(addr)
+ self.pipe = self.open_llvm_symbolizer()
+
+ def open_llvm_symbolizer(self):
+ cmd = [self.symbolizer_path,
+ '--use-symbol-table=true',
+ '--demangle=%s' % demangle,
+ '--functions=short',
+ '--inlining=true',
+ '--default-arch=%s' % self.default_arch]
+ if DEBUG:
+ print ' '.join(cmd)
+ try:
+ result = subprocess.Popen(cmd, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE)
+ except OSError:
+ result = None
+ return result
+
+ def symbolize(self, addr, binary, offset):
+ """Overrides Symbolizer.symbolize."""
+ if not self.pipe:
+ return None
+ result = []
+ try:
+ symbolizer_input = '%s %s' % (binary, offset)
+ if DEBUG:
+ print symbolizer_input
+ print >> self.pipe.stdin, symbolizer_input
+ while True:
+ function_name = self.pipe.stdout.readline().rstrip()
+ if not function_name:
+ break
+ file_name = self.pipe.stdout.readline().rstrip()
+ file_name = fix_filename(file_name)
+ if (not function_name.startswith('??') or
+ not file_name.startswith('??')):
+ # Append only non-trivial frames.
+ result.append('%s in %s %s' % (addr, function_name,
+ file_name))
+ except Exception:
+ result = []
+ if not result:
+ result = None
+ return result
+
+
+def LLVMSymbolizerFactory(system, addr):
+ symbolizer_path = os.getenv('LLVM_SYMBOLIZER_PATH')
+ if not symbolizer_path:
+ symbolizer_path = os.getenv('ASAN_SYMBOLIZER_PATH')
+ if not symbolizer_path:
+ # Assume llvm-symbolizer is in PATH.
+ symbolizer_path = 'llvm-symbolizer'
+ return LLVMSymbolizer(symbolizer_path, addr)
+
+
+class Addr2LineSymbolizer(Symbolizer):
+ def __init__(self, binary):
+ super(Addr2LineSymbolizer, self).__init__()
+ self.binary = binary
+ self.pipe = self.open_addr2line()
+
+ def open_addr2line(self):
+ addr2line_tool = 'addr2line'
+ if binutils_prefix:
+ addr2line_tool = binutils_prefix + addr2line_tool
+ cmd = [addr2line_tool, '-f']
+ if demangle:
+ cmd += ['--demangle']
+ cmd += ['-e', self.binary]
+ if DEBUG:
+ print ' '.join(cmd)
+ return subprocess.Popen(cmd,
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+
+ def symbolize(self, addr, binary, offset):
+ """Overrides Symbolizer.symbolize."""
+ if self.binary != binary:
+ return None
+ try:
+ print >> self.pipe.stdin, offset
+ function_name = self.pipe.stdout.readline().rstrip()
+ file_name = self.pipe.stdout.readline().rstrip()
+ except Exception:
+ function_name = ''
+ file_name = ''
+ file_name = fix_filename(file_name)
+ return ['%s in %s %s' % (addr, function_name, file_name)]
+
+
+class UnbufferedLineConverter(object):
+ """
+ Wrap a child process that responds to each line of input with one line of
+ output. Uses pty to trick the child into providing unbuffered output.
+ """
+ def __init__(self, args, close_stderr=False):
+ pid, fd = pty.fork()
+ if pid == 0:
+ # We're the child. Transfer control to command.
+ if close_stderr:
+ dev_null = os.open('/dev/null', 0)
+ os.dup2(dev_null, 2)
+ os.execvp(args[0], args)
+ else:
+ # Disable echoing.
+ attr = termios.tcgetattr(fd)
+ attr[3] = attr[3] & ~termios.ECHO
+ termios.tcsetattr(fd, termios.TCSANOW, attr)
+ # Set up a file()-like interface to the child process
+ self.r = os.fdopen(fd, "r", 1)
+ self.w = os.fdopen(os.dup(fd), "w", 1)
+
+ def convert(self, line):
+ self.w.write(line + "\n")
+ return self.readline()
+
+ def readline(self):
+ return self.r.readline().rstrip()
+
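+# Illustrative usage sketch (not part of this script's control flow): wrapping
+# a line-echoing command such as `cat` would give
+#   conv = UnbufferedLineConverter(['cat'])
+#   conv.convert('hello')  # -> 'hello'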
+
+class DarwinSymbolizer(Symbolizer):
+ def __init__(self, addr, binary):
+ super(DarwinSymbolizer, self).__init__()
+ self.binary = binary
+ self.arch = guess_arch(addr)
+ self.open_atos()
+
+ def open_atos(self):
+ if DEBUG:
+ print 'atos -o %s -arch %s' % (self.binary, self.arch)
+ cmdline = ['atos', '-o', self.binary, '-arch', self.arch]
+ self.atos = UnbufferedLineConverter(cmdline, close_stderr=True)
+
+ def symbolize(self, addr, binary, offset):
+ """Overrides Symbolizer.symbolize."""
+ if self.binary != binary:
+ return None
+ atos_line = self.atos.convert('0x%x' % int(offset, 16))
+ while "got symbolicator for" in atos_line:
+ atos_line = self.atos.readline()
+ # A well-formed atos response looks like this:
+ # foo(type1, type2) (in object.name) (filename.cc:80)
+ match = re.match('^(.*) \(in (.*)\) \((.*:\d*)\)$', atos_line)
+ if DEBUG:
+ print 'atos_line: ', atos_line
+ if match:
+ function_name = match.group(1)
+ function_name = re.sub('\(.*?\)', '', function_name)
+ file_name = fix_filename(match.group(3))
+ return ['%s in %s %s' % (addr, function_name, file_name)]
+ else:
+ return ['%s in %s' % (addr, atos_line)]
+
+
+# Chain several symbolizers so that if one symbolizer fails, we fall back
+# to the next symbolizer in chain.
+class ChainSymbolizer(Symbolizer):
+ def __init__(self, symbolizer_list):
+ super(ChainSymbolizer, self).__init__()
+ self.symbolizer_list = symbolizer_list
+
+ def symbolize(self, addr, binary, offset):
+ """Overrides Symbolizer.symbolize."""
+ for symbolizer in self.symbolizer_list:
+ if symbolizer:
+ result = symbolizer.symbolize(addr, binary, offset)
+ if result:
+ return result
+ return None
+
+ def append_symbolizer(self, symbolizer):
+ self.symbolizer_list.append(symbolizer)
+
+
+def BreakpadSymbolizerFactory(binary):
+ suffix = os.getenv('BREAKPAD_SUFFIX')
+ if suffix:
+ filename = binary + suffix
+ if os.access(filename, os.F_OK):
+ return BreakpadSymbolizer(filename)
+ return None
+
+
+def SystemSymbolizerFactory(system, addr, binary):
+ if system == 'Darwin':
+ return DarwinSymbolizer(addr, binary)
+ elif system == 'Linux':
+ return Addr2LineSymbolizer(binary)
+
+
+class BreakpadSymbolizer(Symbolizer):
+ def __init__(self, filename):
+ super(BreakpadSymbolizer, self).__init__()
+ self.filename = filename
+ lines = file(filename).readlines()
+ self.files = []
+ self.symbols = {}
+ self.address_list = []
+ self.addresses = {}
+ # MODULE mac x86_64 A7001116478B33F18FF9BEDE9F615F190 t
+ fragments = lines[0].rstrip().split()
+ self.arch = fragments[2]
+ self.debug_id = fragments[3]
+ self.binary = ' '.join(fragments[4:])
+ self.parse_lines(lines[1:])
+
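+  # parse_lines() below assumes the usual Breakpad symbol-file records, e.g.
+  # (illustrative values):
+  #   FILE 0 /path/to/foo.cc
+  #   FUNC 1130 40 0 Foo::Bar()
+  #   1130 12 25 0      (address, size, line number, file number)
+  #   PUBLIC 2170 0 baz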
+ def parse_lines(self, lines):
+ cur_function_addr = ''
+ for line in lines:
+ fragments = line.split()
+ if fragments[0] == 'FILE':
+ assert int(fragments[1]) == len(self.files)
+ self.files.append(' '.join(fragments[2:]))
+ elif fragments[0] == 'PUBLIC':
+ self.symbols[int(fragments[1], 16)] = ' '.join(fragments[3:])
+ elif fragments[0] in ['CFI', 'STACK']:
+ pass
+ elif fragments[0] == 'FUNC':
+ cur_function_addr = int(fragments[1], 16)
+        if cur_function_addr not in self.symbols:
+ self.symbols[cur_function_addr] = ' '.join(fragments[4:])
+ else:
+ # Line starting with an address.
+ addr = int(fragments[0], 16)
+ self.address_list.append(addr)
+ # Tuple of symbol address, size, line, file number.
+ self.addresses[addr] = (cur_function_addr,
+ int(fragments[1], 16),
+ int(fragments[2]),
+ int(fragments[3]))
+ self.address_list.sort()
+
+ def get_sym_file_line(self, addr):
+ key = None
+    if addr in self.addresses:
+ key = addr
+ else:
+ index = bisect.bisect_left(self.address_list, addr)
+ if index == 0:
+ return None
+ else:
+ key = self.address_list[index - 1]
+ sym_id, size, line_no, file_no = self.addresses[key]
+ symbol = self.symbols[sym_id]
+ filename = self.files[file_no]
+ if addr < key + size:
+ return symbol, filename, line_no
+ else:
+ return None
+
+ def symbolize(self, addr, binary, offset):
+ if self.binary != binary:
+ return None
+ res = self.get_sym_file_line(int(offset, 16))
+ if res:
+ function_name, file_name, line_no = res
+ result = ['%s in %s %s:%d' % (
+ addr, function_name, file_name, line_no)]
+ print result
+ return result
+ else:
+ return None
+
+
+class SymbolizationLoop(object):
+ def __init__(self, binary_name_filter=None):
+ # Used by clients who may want to supply a different binary name.
+ # E.g. in Chrome several binaries may share a single .dSYM.
+ self.binary_name_filter = binary_name_filter
+ self.system = os.uname()[0]
+ if self.system not in ['Linux', 'Darwin', 'FreeBSD']:
+ raise Exception('Unknown system')
+ self.llvm_symbolizer = None
+ self.frame_no = 0
+
+ def symbolize_address(self, addr, binary, offset):
+ # Initialize llvm-symbolizer lazily.
+ if not self.llvm_symbolizer:
+ self.llvm_symbolizer = LLVMSymbolizerFactory(self.system, addr)
+ # Use the chain of symbolizers:
+ # Breakpad symbolizer -> LLVM symbolizer -> addr2line/atos
+ # (fall back to next symbolizer if the previous one fails).
+    if binary not in symbolizers:
+ symbolizers[binary] = ChainSymbolizer(
+ [BreakpadSymbolizerFactory(binary), self.llvm_symbolizer])
+ result = symbolizers[binary].symbolize(addr, binary, offset)
+ if result is None:
+ # Initialize system symbolizer only if other symbolizers failed.
+ symbolizers[binary].append_symbolizer(
+ SystemSymbolizerFactory(self.system, addr, binary))
+ result = symbolizers[binary].symbolize(addr, binary, offset)
+ # The system symbolizer must produce some result.
+ assert result
+ return result
+
+ def get_symbolized_lines(self, symbolized_lines):
+ if not symbolized_lines:
+ return [self.current_line]
+ else:
+ result = []
+ for symbolized_frame in symbolized_lines:
+ result.append(' #%s %s' % (str(self.frame_no), symbolized_frame.rstrip()))
+ self.frame_no += 1
+ return result
+
+ def process_logfile(self):
+ self.frame_no = 0
+ while True:
+ line = logfile.readline()
+ if not line:
+ break
+ processed = self.process_line(line)
+ print '\n'.join(processed)
+
+ def process_line(self, line):
+ self.current_line = line.rstrip()
+ #0 0x7f6e35cf2e45 (/blah/foo.so+0x11fe45)
+ stack_trace_line_format = (
+ '^( *#([0-9]+) *)(0x[0-9a-f]+) *\((.*)\+(0x[0-9a-f]+)\)')
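+    # For the sample line in the comment above, the captured groups would be
+    # frame number '0', address '0x7f6e35cf2e45', binary '/blah/foo.so' and
+    # offset '0x11fe45'.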
+ match = re.match(stack_trace_line_format, line)
+ if not match:
+ return [self.current_line]
+ if DEBUG:
+ print line
+ _, frameno_str, addr, binary, offset = match.groups()
+ if frameno_str == '0':
+      # Assume that frame #0 is the first frame of a new stack trace.
+ self.frame_no = 0
+ original_binary = binary
+ if self.binary_name_filter:
+ binary = self.binary_name_filter(binary)
+ symbolized_line = self.symbolize_address(addr, binary, offset)
+ if not symbolized_line:
+ if original_binary != binary:
+ symbolized_line = self.symbolize_address(addr, binary, offset)
+ return self.get_symbolized_lines(symbolized_line)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
+ description='ASan symbolization script',
+ epilog='''Example of use:
+ asan_symbolize.py -c "$HOME/opt/cross/bin/arm-linux-gnueabi-" -s "$HOME/SymbolFiles" < asan.log''')
+ parser.add_argument('path_to_cut', nargs='*',
+ help='pattern to be cut from the result file path ')
+ parser.add_argument('-d','--demangle', action='store_true',
+ help='demangle function names')
+ parser.add_argument('-s', metavar='SYSROOT',
+ help='set path to sysroot for sanitized binaries')
+ parser.add_argument('-c', metavar='CROSS_COMPILE',
+ help='set prefix for binutils')
+ parser.add_argument('-l','--logfile', default=sys.stdin, type=argparse.FileType('r'),
+ help='set log file name to parse, default is stdin')
+ args = parser.parse_args()
+ if args.path_to_cut:
+ fix_filename_patterns = args.path_to_cut
+ if args.demangle:
+ demangle = True
+ if args.s:
+ binary_name_filter = sysroot_path_filter
+ sysroot_path = args.s
+ if args.c:
+ binutils_prefix = args.c
+ if args.logfile:
+ logfile = args.logfile
+ else:
+ logfile = sys.stdin
+ loop = SymbolizationLoop(binary_name_filter)
+ loop.process_logfile()
diff --git a/tools/valgrind/browser_wrapper_win.py b/tools/valgrind/browser_wrapper_win.py
new file mode 100644
index 0000000..ee0a961
--- /dev/null
+++ b/tools/valgrind/browser_wrapper_win.py
@@ -0,0 +1,49 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import os
+import re
+import sys
+import subprocess
+
+# TODO(timurrrr): we may use it on POSIX too to avoid code duplication once we
+# support layout_tests, remove Dr. Memory specific code and verify it works
+# on a "clean" Mac.
+
+testcase_name = None
+for arg in sys.argv:
+ m = re.match("\-\-gtest_filter=(.*)", arg)
+ if m:
+ assert testcase_name is None
+ testcase_name = m.groups()[0]
+
+# arg #0 is the path to this python script
+cmd_to_run = sys.argv[1:]
+
+# TODO(timurrrr): this is Dr. Memory-specific
+# Usually, we pass "-logdir" "foo\bar\spam path" args to Dr. Memory.
+# To group reports per UI test, we want to put the reports for each test into a
+# separate directory. This code can be simplified when we have
+# http://code.google.com/p/drmemory/issues/detail?id=684 fixed.
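+# For example (hypothetical values): with "-logdir c:\drm_logs", a wrapper with
+# PID 1234 that finds 5 existing entries in that directory writes its reports
+# to "c:\drm_logs\testcase.1234_5.logs".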
+logdir_idx = cmd_to_run.index("-logdir")
+old_logdir = cmd_to_run[logdir_idx + 1]
+
+wrapper_pid = str(os.getpid())
+
+# On Windows, there is a chance of PID collision. We avoid it by appending the
+# number of entries in the logdir at the end of wrapper_pid.
+# This number is monotonic, and two simultaneously running wrappers can't have
+# the same PID, so the combined identifier is unique.
+wrapper_pid += "_%d" % len(glob.glob(old_logdir + "\\*"))
+
+cmd_to_run[logdir_idx + 1] += "\\testcase.%s.logs" % wrapper_pid
+os.makedirs(cmd_to_run[logdir_idx + 1])
+
+if testcase_name:
+ f = open(old_logdir + "\\testcase.%s.name" % wrapper_pid, "w")
+ print >>f, testcase_name
+ f.close()
+
+exit(subprocess.call(cmd_to_run))
diff --git a/tools/valgrind/chrome_tests.bat b/tools/valgrind/chrome_tests.bat
new file mode 100755
index 0000000..138bec7
--- /dev/null
+++ b/tools/valgrind/chrome_tests.bat
@@ -0,0 +1,70 @@
+@echo off
+:: Copyright (c) 2011 The Chromium Authors. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+
+:: TODO(timurrrr): batch files 'export' all the variables to the parent shell
+set THISDIR=%~dp0
+set TOOL_NAME="unknown"
+
+:: Get the tool name and put it into TOOL_NAME {{{1
+:: NB: SHIFT command doesn't modify %*
+:PARSE_ARGS_LOOP
+ if %1 == () GOTO:TOOLNAME_NOT_FOUND
+ if %1 == --tool GOTO:TOOLNAME_FOUND
+ SHIFT
+ goto :PARSE_ARGS_LOOP
+
+:TOOLNAME_NOT_FOUND
+echo "Please specify a tool (tsan or drmemory) by using --tool flag"
+exit /B 1
+
+:TOOLNAME_FOUND
+SHIFT
+set TOOL_NAME=%1
+:: }}}
+if "%TOOL_NAME%" == "drmemory" GOTO :SETUP_DRMEMORY
+if "%TOOL_NAME%" == "drmemory_light" GOTO :SETUP_DRMEMORY
+if "%TOOL_NAME%" == "drmemory_full" GOTO :SETUP_DRMEMORY
+if "%TOOL_NAME%" == "drmemory_pattern" GOTO :SETUP_DRMEMORY
+if "%TOOL_NAME%" == "tsan" GOTO :SETUP_TSAN
+echo "Unknown tool: `%TOOL_NAME%`! Only tsan and drmemory are supported right now"
+exit /B 1
+
+:SETUP_DRMEMORY
+if NOT "%DRMEMORY_COMMAND%"=="" GOTO :RUN_TESTS
+:: Set up DRMEMORY_COMMAND to invoke Dr. Memory {{{1
+set DRMEMORY_PATH=%THISDIR%..\..\third_party\drmemory
+set DRMEMORY_SFX=%DRMEMORY_PATH%\drmemory-windows-sfx.exe
+if EXIST %DRMEMORY_SFX% GOTO DRMEMORY_BINARY_OK
+echo "Can't find Dr. Memory executables."
+echo "See http://www.chromium.org/developers/how-tos/using-valgrind/dr-memory"
+echo "for the instructions on how to get them."
+exit /B 1
+
+:DRMEMORY_BINARY_OK
+%DRMEMORY_SFX% -o%DRMEMORY_PATH%\unpacked -y
+set DRMEMORY_COMMAND=%DRMEMORY_PATH%\unpacked\bin\drmemory.exe
+:: }}}
+goto :RUN_TESTS
+
+:SETUP_TSAN
+:: Set up PIN_COMMAND to invoke TSan {{{1
+set TSAN_PATH=%THISDIR%..\..\third_party\tsan
+set TSAN_SFX=%TSAN_PATH%\tsan-x86-windows-sfx.exe
+if EXIST %TSAN_SFX% GOTO TSAN_BINARY_OK
+echo "Can't find ThreadSanitizer executables."
+echo "See http://www.chromium.org/developers/how-tos/using-valgrind/threadsanitizer/threadsanitizer-on-windows"
+echo "for the instructions on how to get them."
+exit /B 1
+
+:TSAN_BINARY_OK
+%TSAN_SFX% -o%TSAN_PATH%\unpacked -y
+set PIN_COMMAND=%TSAN_PATH%\unpacked\tsan-x86-windows\tsan.bat
+:: }}}
+goto :RUN_TESTS
+
+:RUN_TESTS
+set PYTHONPATH=%THISDIR%../python/google
+set RUNNING_ON_VALGRIND=yes
+python %THISDIR%/chrome_tests.py %*
diff --git a/tools/valgrind/chrome_tests.py b/tools/valgrind/chrome_tests.py
new file mode 100755
index 0000000..d5f3d18
--- /dev/null
+++ b/tools/valgrind/chrome_tests.py
@@ -0,0 +1,807 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+''' Runs various chrome tests through valgrind_test.py.'''
+
+import glob
+import logging
+import multiprocessing
+import optparse
+import os
+import stat
+import subprocess
+import sys
+
+import logging_utils
+import path_utils
+
+import common
+import valgrind_test
+
+class TestNotFound(Exception): pass
+
+class MultipleGTestFiltersSpecified(Exception): pass
+
+class BuildDirNotFound(Exception): pass
+
+class BuildDirAmbiguous(Exception): pass
+
+class ExecutableNotFound(Exception): pass
+
+class BadBinary(Exception): pass
+
+class ChromeTests:
+ SLOW_TOOLS = ["memcheck", "tsan", "tsan_rv", "drmemory"]
+ LAYOUT_TESTS_DEFAULT_CHUNK_SIZE = 300
+
+ def __init__(self, options, args, test):
+ if ':' in test:
+ (self._test, self._gtest_filter) = test.split(':', 1)
+ else:
+ self._test = test
+ self._gtest_filter = options.gtest_filter
+
+ if self._test not in self._test_list:
+ raise TestNotFound("Unknown test: %s" % test)
+
+ if options.gtest_filter and options.gtest_filter != self._gtest_filter:
+ raise MultipleGTestFiltersSpecified("Can not specify both --gtest_filter "
+ "and --test %s" % test)
+
+ self._options = options
+ self._args = args
+
+ script_dir = path_utils.ScriptDir()
+ # Compute the top of the tree (the "source dir") from the script dir (where
+ # this script lives). We assume that the script dir is in tools/valgrind/
+ # relative to the top of the tree.
+ self._source_dir = os.path.dirname(os.path.dirname(script_dir))
+ # since this path is used for string matching, make sure it's always
+ # an absolute Unix-style path
+ self._source_dir = os.path.abspath(self._source_dir).replace('\\', '/')
+ valgrind_test_script = os.path.join(script_dir, "valgrind_test.py")
+ self._command_preamble = ["--source-dir=%s" % (self._source_dir)]
+
+ if not self._options.build_dir:
+ dirs = [
+ os.path.join(self._source_dir, "xcodebuild", "Debug"),
+ os.path.join(self._source_dir, "out", "Debug"),
+ os.path.join(self._source_dir, "build", "Debug"),
+ ]
+ build_dir = [d for d in dirs if os.path.isdir(d)]
+ if len(build_dir) > 1:
+ raise BuildDirAmbiguous("Found more than one suitable build dir:\n"
+ "%s\nPlease specify just one "
+ "using --build-dir" % ", ".join(build_dir))
+ elif build_dir:
+ self._options.build_dir = build_dir[0]
+ else:
+ self._options.build_dir = None
+
+ if self._options.build_dir:
+ build_dir = os.path.abspath(self._options.build_dir)
+ self._command_preamble += ["--build-dir=%s" % (self._options.build_dir)]
+
+ def _EnsureBuildDirFound(self):
+ if not self._options.build_dir:
+ raise BuildDirNotFound("Oops, couldn't find a build dir, please "
+ "specify it manually using --build-dir")
+
+ def _DefaultCommand(self, tool, exe=None, valgrind_test_args=None):
+ '''Generates the default command array that most tests will use.'''
+ if exe and common.IsWindows():
+ exe += '.exe'
+
+ cmd = list(self._command_preamble)
+
+ # Find all suppressions matching the following pattern:
+ # tools/valgrind/TOOL/suppressions[_PLATFORM].txt
+ # and list them with --suppressions= prefix.
+ script_dir = path_utils.ScriptDir()
+    tool_name = tool.ToolName()
+ suppression_file = os.path.join(script_dir, tool_name, "suppressions.txt")
+ if os.path.exists(suppression_file):
+ cmd.append("--suppressions=%s" % suppression_file)
+ # Platform-specific suppression
+ for platform in common.PlatformNames():
+ platform_suppression_file = \
+ os.path.join(script_dir, tool_name, 'suppressions_%s.txt' % platform)
+ if os.path.exists(platform_suppression_file):
+ cmd.append("--suppressions=%s" % platform_suppression_file)
+
+ if self._options.valgrind_tool_flags:
+ cmd += self._options.valgrind_tool_flags.split(" ")
+ if self._options.keep_logs:
+ cmd += ["--keep_logs"]
+    if valgrind_test_args is not None:
+ for arg in valgrind_test_args:
+ cmd.append(arg)
+ if exe:
+ self._EnsureBuildDirFound()
+ exe_path = os.path.join(self._options.build_dir, exe)
+ if not os.path.exists(exe_path):
+ raise ExecutableNotFound("Couldn't find '%s'" % exe_path)
+
+ # Make sure we don't try to test ASan-built binaries
+ # with other dynamic instrumentation-based tools.
+ # TODO(timurrrr): also check TSan and MSan?
+ # `nm` might not be available, so use try-except.
+ try:
+ nm_output = subprocess.check_output(["nm", exe_path])
+ if nm_output.find("__asan_init") != -1:
+ raise BadBinary("You're trying to run an executable instrumented "
+ "with AddressSanitizer under %s. Please provide "
+ "an uninstrumented executable." % tool_name)
+ except OSError:
+ pass
+
+ cmd.append(exe_path)
+      # Valgrind runs tests slowly, so slow tests hurt more; show elapsed time
+ # so we can find the slowpokes.
+ cmd.append("--gtest_print_time")
+      # The built-in test launcher for gtest-based executables runs tests in
+      # multiple processes by default. Force single-process mode instead.
+ cmd.append("--single-process-tests")
+ if self._options.gtest_repeat:
+ cmd.append("--gtest_repeat=%s" % self._options.gtest_repeat)
+ if self._options.gtest_shuffle:
+ cmd.append("--gtest_shuffle")
+ if self._options.brave_new_test_launcher:
+ cmd.append("--brave-new-test-launcher")
+ if self._options.test_launcher_bot_mode:
+ cmd.append("--test-launcher-bot-mode")
+ return cmd
+
+ def Run(self):
+ ''' Runs the test specified by command-line argument --test '''
+ logging.info("running test %s" % (self._test))
+ return self._test_list[self._test](self)
+
+ def _AppendGtestFilter(self, tool, name, cmd):
+ '''Append an appropriate --gtest_filter flag to the googletest binary
+ invocation.
+    If the user passed their own filter mentioning only one test, just use it.
+    Otherwise, filter out tests listed in the appropriate gtest_exclude files.
+ '''
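+    # For example (hypothetical test names), excluding FooTest.Bar ends up
+    # appending a flag such as
+    # --gtest_filter=-FooTest.Bar:FooTest.FLAKY_Bar:FooTest.FAILS_Bar
+    # (the order of the excluded names is not significant).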
+ if (self._gtest_filter and
+ ":" not in self._gtest_filter and
+ "?" not in self._gtest_filter and
+ "*" not in self._gtest_filter):
+ cmd.append("--gtest_filter=%s" % self._gtest_filter)
+ return
+
+ filters = []
+ gtest_files_dir = os.path.join(path_utils.ScriptDir(), "gtest_exclude")
+
+ gtest_filter_files = [
+ os.path.join(gtest_files_dir, name + ".gtest-%s.txt" % tool.ToolName())]
+ # Use ".gtest.txt" files only for slow tools, as they now contain
+ # Valgrind- and Dr.Memory-specific filters.
+ # TODO(glider): rename the files to ".gtest_slow.txt"
+ if tool.ToolName() in ChromeTests.SLOW_TOOLS:
+ gtest_filter_files += [os.path.join(gtest_files_dir, name + ".gtest.txt")]
+ for platform_suffix in common.PlatformNames():
+ gtest_filter_files += [
+ os.path.join(gtest_files_dir, name + ".gtest_%s.txt" % platform_suffix),
+ os.path.join(gtest_files_dir, name + ".gtest-%s_%s.txt" % \
+ (tool.ToolName(), platform_suffix))]
+ logging.info("Reading gtest exclude filter files:")
+ for filename in gtest_filter_files:
+ # strip the leading absolute path (may be very long on the bot)
+ # and the following / or \.
+ readable_filename = filename.replace("\\", "/") # '\' on Windows
+ readable_filename = readable_filename.replace(self._source_dir, "")[1:]
+ if not os.path.exists(filename):
+ logging.info(" \"%s\" - not found" % readable_filename)
+ continue
+ logging.info(" \"%s\" - OK" % readable_filename)
+ f = open(filename, 'r')
+ for line in f.readlines():
+ if line.startswith("#") or line.startswith("//") or line.isspace():
+ continue
+ line = line.rstrip()
+ test_prefixes = ["FLAKY", "FAILS"]
+ for p in test_prefixes:
+ # Strip prefixes from the test names.
+ line = line.replace(".%s_" % p, ".")
+ # Exclude the original test name.
+ filters.append(line)
+ if line[-2:] != ".*":
+ # List all possible prefixes if line doesn't end with ".*".
+ for p in test_prefixes:
+ filters.append(line.replace(".", ".%s_" % p))
+ # Get rid of duplicates.
+ filters = set(filters)
+ gtest_filter = self._gtest_filter
+ if len(filters):
+ if gtest_filter:
+ gtest_filter += ":"
+ if gtest_filter.find("-") < 0:
+ gtest_filter += "-"
+ else:
+ gtest_filter = "-"
+ gtest_filter += ":".join(filters)
+ if gtest_filter:
+ cmd.append("--gtest_filter=%s" % gtest_filter)
+
+ @staticmethod
+ def ShowTests():
+ test_to_names = {}
+ for name, test_function in ChromeTests._test_list.iteritems():
+ test_to_names.setdefault(test_function, []).append(name)
+
+ name_to_aliases = {}
+ for names in test_to_names.itervalues():
+ names.sort(key=lambda name: len(name))
+ name_to_aliases[names[0]] = names[1:]
+
+ print
+ print "Available tests:"
+ print "----------------"
+ for name, aliases in sorted(name_to_aliases.iteritems()):
+ if aliases:
+ print " {} (aka {})".format(name, ', '.join(aliases))
+ else:
+ print " {}".format(name)
+
+ def SetupLdPath(self, requires_build_dir):
+ if requires_build_dir:
+ self._EnsureBuildDirFound()
+ elif not self._options.build_dir:
+ return
+
+ # Append build_dir to LD_LIBRARY_PATH so external libraries can be loaded.
+ if (os.getenv("LD_LIBRARY_PATH")):
+ os.putenv("LD_LIBRARY_PATH", "%s:%s" % (os.getenv("LD_LIBRARY_PATH"),
+ self._options.build_dir))
+ else:
+ os.putenv("LD_LIBRARY_PATH", self._options.build_dir)
+
+ def SimpleTest(self, module, name, valgrind_test_args=None, cmd_args=None):
+ tool = valgrind_test.CreateTool(self._options.valgrind_tool)
+ cmd = self._DefaultCommand(tool, name, valgrind_test_args)
+ self._AppendGtestFilter(tool, name, cmd)
+ cmd.extend(['--test-tiny-timeout=1000'])
+ if cmd_args:
+ cmd.extend(cmd_args)
+
+ self.SetupLdPath(True)
+ return tool.Run(cmd, module)
+
+ def RunCmdLine(self):
+ tool = valgrind_test.CreateTool(self._options.valgrind_tool)
+ cmd = self._DefaultCommand(tool, None, self._args)
+ self.SetupLdPath(False)
+ return tool.Run(cmd, None)
+
+ def TestAccessibility(self):
+ return self.SimpleTest("accessibility", "accessibility_unittests")
+
+ def TestAddressInput(self):
+ return self.SimpleTest("addressinput", "libaddressinput_unittests")
+
+ def TestAngle(self):
+ return self.SimpleTest("angle", "angle_unittests")
+
+ def TestAppList(self):
+ return self.SimpleTest("app_list", "app_list_unittests")
+
+ def TestAsh(self):
+ return self.SimpleTest("ash", "ash_unittests")
+
+ def TestAshShell(self):
+ return self.SimpleTest("ash_shelf", "ash_shell_unittests")
+
+ def TestAura(self):
+ return self.SimpleTest("aura", "aura_unittests")
+
+ def TestBase(self):
+ return self.SimpleTest("base", "base_unittests")
+
+ def TestBlinkHeap(self):
+ return self.SimpleTest("blink_heap", "blink_heap_unittests")
+
+ def TestBlinkPlatform(self):
+ return self.SimpleTest("blink_platform", "blink_platform_unittests")
+
+ def TestCacheInvalidation(self):
+ return self.SimpleTest("cacheinvalidation", "cacheinvalidation_unittests")
+
+ def TestCast(self):
+ return self.SimpleTest("chrome", "cast_unittests")
+
+ def TestCC(self):
+ return self.SimpleTest("cc", "cc_unittests")
+
+ def TestChromeApp(self):
+ return self.SimpleTest("chrome_app", "chrome_app_unittests")
+
+ def TestChromeElf(self):
+ return self.SimpleTest("chrome_elf", "chrome_elf_unittests")
+
+ def TestChromeDriver(self):
+ return self.SimpleTest("chromedriver", "chromedriver_unittests")
+
+ def TestChromeOS(self):
+ return self.SimpleTest("chromeos", "chromeos_unittests")
+
+ def TestCloudPrint(self):
+ return self.SimpleTest("cloud_print", "cloud_print_unittests")
+
+ def TestComponents(self):
+ return self.SimpleTest("components", "components_unittests")
+
+ def TestCompositor(self):
+ return self.SimpleTest("compositor", "compositor_unittests")
+
+ def TestContent(self):
+ return self.SimpleTest("content", "content_unittests")
+
+ def TestCourgette(self):
+ return self.SimpleTest("courgette", "courgette_unittests")
+
+ def TestCrypto(self):
+ return self.SimpleTest("crypto", "crypto_unittests")
+
+ def TestDevice(self):
+ return self.SimpleTest("device", "device_unittests")
+
+ def TestDisplay(self):
+ return self.SimpleTest("display", "display_unittests")
+
+ def TestEvents(self):
+ return self.SimpleTest("events", "events_unittests")
+
+ def TestExtensions(self):
+ return self.SimpleTest("extensions", "extensions_unittests")
+
+ def TestFFmpeg(self):
+ return self.SimpleTest("chrome", "ffmpeg_unittests")
+
+ def TestFFmpegRegressions(self):
+ return self.SimpleTest("chrome", "ffmpeg_regression_tests")
+
+ def TestGCM(self):
+ return self.SimpleTest("gcm", "gcm_unit_tests")
+
+ def TestGfx(self):
+ return self.SimpleTest("gfx", "gfx_unittests")
+
+ def TestGin(self):
+ return self.SimpleTest("gin", "gin_unittests")
+
+ def TestGoogleApis(self):
+ return self.SimpleTest("google_apis", "google_apis_unittests")
+
+ def TestGPU(self):
+ return self.SimpleTest("gpu", "gpu_unittests")
+
+ def TestIpc(self):
+ return self.SimpleTest("ipc", "ipc_tests",
+ valgrind_test_args=["--trace_children"])
+
+ def TestInstallerUtil(self):
+ return self.SimpleTest("installer_util", "installer_util_unittests")
+
+ def TestJingle(self):
+ return self.SimpleTest("chrome", "jingle_unittests")
+
+ def TestKeyboard(self):
+ return self.SimpleTest("keyboard", "keyboard_unittests")
+
+ def TestMedia(self):
+ return self.SimpleTest("chrome", "media_unittests")
+
+ def TestMessageCenter(self):
+ return self.SimpleTest("message_center", "message_center_unittests")
+
+ def TestMojoAppsJS(self):
+ return self.SimpleTest("mojo_apps_js", "mojo_apps_js_unittests")
+
+ def TestMojoCommon(self):
+ return self.SimpleTest("mojo_common", "mojo_common_unittests")
+
+ def TestMojoJS(self):
+ return self.SimpleTest("mojo_js", "mojo_js_unittests")
+
+ def TestMojoPublicBindings(self):
+ return self.SimpleTest("mojo_public_bindings",
+ "mojo_public_bindings_unittests")
+
+ def TestMojoPublicEnv(self):
+ return self.SimpleTest("mojo_public_env",
+ "mojo_public_environment_unittests")
+
+ def TestMojoPublicSystem(self):
+ return self.SimpleTest("mojo_public_system",
+ "mojo_public_system_unittests")
+
+ def TestMojoPublicSysPerf(self):
+ return self.SimpleTest("mojo_public_sysperf",
+ "mojo_public_system_perftests")
+
+ def TestMojoPublicUtility(self):
+ return self.SimpleTest("mojo_public_utility",
+ "mojo_public_utility_unittests")
+
+ def TestMojoApplicationManager(self):
+ return self.SimpleTest("mojo_application_manager",
+ "mojo_application_manager_unittests")
+
+ def TestMojoSystem(self):
+ return self.SimpleTest("mojo_system", "mojo_system_unittests")
+
+ def TestMojoViewManager(self):
+ return self.SimpleTest("mojo_view_manager", "mojo_view_manager_unittests")
+
+ def TestNet(self):
+ return self.SimpleTest("net", "net_unittests")
+
+ def TestNetPerf(self):
+ return self.SimpleTest("net", "net_perftests")
+
+ def TestPhoneNumber(self):
+ return self.SimpleTest("phonenumber", "libphonenumber_unittests")
+
+ def TestPPAPI(self):
+ return self.SimpleTest("chrome", "ppapi_unittests")
+
+ def TestPrinting(self):
+ return self.SimpleTest("chrome", "printing_unittests")
+
+ def TestRemoting(self):
+ return self.SimpleTest("chrome", "remoting_unittests",
+ cmd_args=[
+ "--ui-test-action-timeout=60000",
+ "--ui-test-action-max-timeout=150000"])
+
+ def TestSql(self):
+ return self.SimpleTest("chrome", "sql_unittests")
+
+ def TestSync(self):
+ return self.SimpleTest("chrome", "sync_unit_tests")
+
+ def TestLinuxSandbox(self):
+ return self.SimpleTest("sandbox", "sandbox_linux_unittests")
+
+ def TestUnit(self):
+ # http://crbug.com/51716
+ # Disabling all unit tests
+ # Problems reappeared after r119922
+ if common.IsMac() and (self._options.valgrind_tool == "memcheck"):
+ logging.warning("unit_tests are disabled for memcheck on MacOS.")
+      return 0
+ return self.SimpleTest("chrome", "unit_tests")
+
+ def TestUIUnit(self):
+ return self.SimpleTest("chrome", "ui_unittests")
+
+ def TestURL(self):
+ return self.SimpleTest("chrome", "url_unittests")
+
+ def TestViews(self):
+ return self.SimpleTest("views", "views_unittests")
+
+
+ # Valgrind timeouts are in seconds.
+ UI_VALGRIND_ARGS = ["--timeout=14400", "--trace_children", "--indirect"]
+ # UI test timeouts are in milliseconds.
+ UI_TEST_ARGS = ["--ui-test-action-timeout=60000",
+ "--ui-test-action-max-timeout=150000",
+ "--no-sandbox"]
+
+ # TODO(thestig) fine-tune these values.
+ # Valgrind timeouts are in seconds.
+ BROWSER_VALGRIND_ARGS = ["--timeout=50000", "--trace_children", "--indirect"]
+ # Browser test timeouts are in milliseconds.
+ BROWSER_TEST_ARGS = ["--ui-test-action-timeout=400000",
+ "--ui-test-action-max-timeout=800000",
+ "--no-sandbox"]
+
+ def TestBrowser(self):
+ return self.SimpleTest("chrome", "browser_tests",
+ valgrind_test_args=self.BROWSER_VALGRIND_ARGS,
+ cmd_args=self.BROWSER_TEST_ARGS)
+
+ def TestContentBrowser(self):
+ return self.SimpleTest("content", "content_browsertests",
+ valgrind_test_args=self.BROWSER_VALGRIND_ARGS,
+ cmd_args=self.BROWSER_TEST_ARGS)
+
+ def TestInteractiveUI(self):
+ return self.SimpleTest("chrome", "interactive_ui_tests",
+ valgrind_test_args=self.UI_VALGRIND_ARGS,
+ cmd_args=self.UI_TEST_ARGS)
+
+ def TestSafeBrowsing(self):
+ return self.SimpleTest("chrome", "safe_browsing_tests",
+ valgrind_test_args=self.UI_VALGRIND_ARGS,
+ cmd_args=(["--ui-test-action-max-timeout=450000"]))
+
+ def TestSyncIntegration(self):
+ return self.SimpleTest("chrome", "sync_integration_tests",
+ valgrind_test_args=self.UI_VALGRIND_ARGS,
+ cmd_args=(["--ui-test-action-max-timeout=450000"]))
+
+ def TestLayoutChunk(self, chunk_num, chunk_size):
+ # Run tests [chunk_num*chunk_size .. (chunk_num+1)*chunk_size) from the
+ # list of tests. Wrap around to beginning of list at end.
+ # If chunk_size is zero, run all tests in the list once.
+ # If a text file is given as argument, it is used as the list of tests.
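+    # For example, chunk_num=2 with chunk_size=300 covers tests 600..899.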
+ assert((chunk_size == 0) != (len(self._args) == 0))
+ # Build the ginormous commandline in 'cmd'.
+ # It's going to be roughly
+ # python valgrind_test.py ...
+ # but we'll use the --indirect flag to valgrind_test.py
+ # to avoid valgrinding python.
+ # Start by building the valgrind_test.py commandline.
+ tool = valgrind_test.CreateTool(self._options.valgrind_tool)
+ cmd = self._DefaultCommand(tool)
+ cmd.append("--trace_children")
+ cmd.append("--indirect_webkit_layout")
+ cmd.append("--ignore_exit_code")
+ # Now build script_cmd, the run-webkits-tests commandline.
+ # Store each chunk in its own directory so that we can find the data later
+ chunk_dir = os.path.join("layout", "chunk_%05d" % chunk_num)
+ out_dir = os.path.join(path_utils.ScriptDir(), "latest")
+ out_dir = os.path.join(out_dir, chunk_dir)
+ if os.path.exists(out_dir):
+ old_files = glob.glob(os.path.join(out_dir, "*.txt"))
+ for f in old_files:
+ os.remove(f)
+ else:
+ os.makedirs(out_dir)
+ script = os.path.join(self._source_dir, "third_party", "WebKit", "Tools",
+ "Scripts", "run-webkit-tests")
+ # http://crbug.com/260627: After the switch to content_shell from DRT, each
+ # test now brings up 3 processes. Under Valgrind, they become memory bound
+ # and can eventually OOM if we don't reduce the total count.
+ # It'd be nice if content_shell automatically throttled the startup of new
+ # tests if we're low on memory.
+ jobs = max(1, int(multiprocessing.cpu_count() * 0.3))
+ script_cmd = ["python", script, "-v",
+ # run a separate DumpRenderTree for each test
+ "--batch-size=1",
+ "--fully-parallel",
+ "--child-processes=%d" % jobs,
+ "--time-out-ms=800000",
+ "--no-retry-failures", # retrying takes too much time
+ # http://crbug.com/176908: Don't launch a browser when done.
+ "--no-show-results",
+ "--nocheck-sys-deps"]
+ # Pass build mode to run-webkit-tests. We aren't passed it directly,
+ # so parse it out of build_dir. run-webkit-tests can only handle
+ # the two values "Release" and "Debug".
+ # TODO(Hercules): unify how all our scripts pass around build mode
+ # (--mode / --target / --build-dir / --debug)
+ if self._options.build_dir:
+ build_root, mode = os.path.split(self._options.build_dir)
+ script_cmd.extend(["--build-directory", build_root, "--target", mode])
+ if (chunk_size > 0):
+ script_cmd.append("--run-chunk=%d:%d" % (chunk_num, chunk_size))
+ if len(self._args):
+ # if the arg is a txt file, then treat it as a list of tests
+ if os.path.isfile(self._args[0]) and self._args[0][-4:] == ".txt":
+ script_cmd.append("--test-list=%s" % self._args[0])
+ else:
+ script_cmd.extend(self._args)
+ self._AppendGtestFilter(tool, "layout", script_cmd)
+ # Now run script_cmd with the wrapper in cmd
+ cmd.extend(["--"])
+ cmd.extend(script_cmd)
+
+    # Layout tests often fail quickly, but the buildbot remains green.
+ # Detect this situation when running with the default chunk size.
+ if chunk_size == self.LAYOUT_TESTS_DEFAULT_CHUNK_SIZE:
+ min_runtime_in_seconds=120
+ else:
+ min_runtime_in_seconds=0
+ ret = tool.Run(cmd, "layout", min_runtime_in_seconds=min_runtime_in_seconds)
+ return ret
+
+
+ def TestLayout(self):
+ # A "chunk file" is maintained in the local directory so that each test
+ # runs a slice of the layout tests of size chunk_size that increments with
+ # each run. Since tests can be added and removed from the layout tests at
+ # any time, this is not going to give exact coverage, but it will allow us
+ # to continuously run small slices of the layout tests under valgrind rather
+ # than having to run all of them in one shot.
+ chunk_size = self._options.num_tests
+ if chunk_size == 0 or len(self._args):
+ return self.TestLayoutChunk(0, 0)
+ chunk_num = 0
+ chunk_file = os.path.join("valgrind_layout_chunk.txt")
+ logging.info("Reading state from " + chunk_file)
+ try:
+ f = open(chunk_file)
+ if f:
+ chunk_str = f.read()
+ if len(chunk_str):
+ chunk_num = int(chunk_str)
+          # This should be enough so that we have a couple of complete runs of
+          # test data stored in the archive (although note that when we loop,
+          # we are almost guaranteed not to be at the end of the test list).
+ if chunk_num > 10000:
+ chunk_num = 0
+ f.close()
+ except IOError, (errno, strerror):
+ logging.error("error reading from file %s (%d, %s)" % (chunk_file,
+ errno, strerror))
+ # Save the new chunk size before running the tests. Otherwise if a
+ # particular chunk hangs the bot, the chunk number will never get
+ # incremented and the bot will be wedged.
+ logging.info("Saving state to " + chunk_file)
+ try:
+ f = open(chunk_file, "w")
+ chunk_num += 1
+ f.write("%d" % chunk_num)
+ f.close()
+ except IOError, (errno, strerror):
+ logging.error("error writing to file %s (%d, %s)" % (chunk_file, errno,
+ strerror))
+ # Since we're running small chunks of the layout tests, it's important to
+ # mark the ones that have errors in them. These won't be visible in the
+ # summary list for long, but will be useful for someone reviewing this bot.
+ return self.TestLayoutChunk(chunk_num, chunk_size)
+
+ # The known list of tests.
+ # Recognise the original abbreviations as well as full executable names.
+ _test_list = {
+ "cmdline" : RunCmdLine,
+ "addressinput": TestAddressInput,
+ "libaddressinput_unittests": TestAddressInput,
+ "accessibility": TestAccessibility,
+ "angle": TestAngle, "angle_unittests": TestAngle,
+ "app_list": TestAppList, "app_list_unittests": TestAppList,
+ "ash": TestAsh, "ash_unittests": TestAsh,
+ "ash_shell": TestAshShell, "ash_shell_unittests": TestAshShell,
+ "aura": TestAura, "aura_unittests": TestAura,
+ "base": TestBase, "base_unittests": TestBase,
+ "blink_heap": TestBlinkHeap,
+ "blink_platform": TestBlinkPlatform,
+ "browser": TestBrowser, "browser_tests": TestBrowser,
+ "cacheinvalidation": TestCacheInvalidation,
+ "cacheinvalidation_unittests": TestCacheInvalidation,
+ "cast": TestCast, "cast_unittests": TestCast,
+ "cc": TestCC, "cc_unittests": TestCC,
+ "chrome_app": TestChromeApp,
+ "chrome_elf": TestChromeElf,
+ "chromedriver": TestChromeDriver,
+ "chromeos": TestChromeOS, "chromeos_unittests": TestChromeOS,
+ "cloud_print": TestCloudPrint,
+ "cloud_print_unittests": TestCloudPrint,
+ "components": TestComponents,"components_unittests": TestComponents,
+ "compositor": TestCompositor,"compositor_unittests": TestCompositor,
+ "content": TestContent, "content_unittests": TestContent,
+ "content_browsertests": TestContentBrowser,
+ "courgette": TestCourgette, "courgette_unittests": TestCourgette,
+ "crypto": TestCrypto, "crypto_unittests": TestCrypto,
+ "device": TestDevice, "device_unittests": TestDevice,
+ "display": TestDisplay, "display_unittests": TestDisplay,
+ "events": TestEvents, "events_unittests": TestEvents,
+ "extensions": TestExtensions, "extensions_unittests": TestExtensions,
+ "ffmpeg": TestFFmpeg, "ffmpeg_unittests": TestFFmpeg,
+ "ffmpeg_regression_tests": TestFFmpegRegressions,
+ "gcm": TestGCM, "gcm_unit_tests": TestGCM,
+ "gin": TestGin, "gin_unittests": TestGin,
+ "gfx": TestGfx, "gfx_unittests": TestGfx,
+ "google_apis": TestGoogleApis,
+ "gpu": TestGPU, "gpu_unittests": TestGPU,
+ "ipc": TestIpc, "ipc_tests": TestIpc,
+ "installer_util": TestInstallerUtil,
+ "interactive_ui": TestInteractiveUI,
+ "jingle": TestJingle, "jingle_unittests": TestJingle,
+ "keyboard": TestKeyboard, "keyboard_unittests": TestKeyboard,
+ "layout": TestLayout, "layout_tests": TestLayout,
+ "media": TestMedia, "media_unittests": TestMedia,
+ "message_center": TestMessageCenter,
+ "message_center_unittests" : TestMessageCenter,
+ "mojo_apps_js": TestMojoAppsJS,
+ "mojo_common": TestMojoCommon,
+ "mojo_js": TestMojoJS,
+ "mojo_system": TestMojoSystem,
+ "mojo_public_system": TestMojoPublicSystem,
+ "mojo_public_utility": TestMojoPublicUtility,
+ "mojo_public_bindings": TestMojoPublicBindings,
+ "mojo_public_env": TestMojoPublicEnv,
+ "mojo_public_sysperf": TestMojoPublicSysPerf,
+ "mojo_application_manager": TestMojoApplicationManager,
+ "mojo_view_manager": TestMojoViewManager,
+ "net": TestNet, "net_unittests": TestNet,
+ "net_perf": TestNetPerf, "net_perftests": TestNetPerf,
+ "phonenumber": TestPhoneNumber,
+ "libphonenumber_unittests": TestPhoneNumber,
+ "ppapi": TestPPAPI, "ppapi_unittests": TestPPAPI,
+ "printing": TestPrinting, "printing_unittests": TestPrinting,
+ "remoting": TestRemoting, "remoting_unittests": TestRemoting,
+ "safe_browsing": TestSafeBrowsing, "safe_browsing_tests": TestSafeBrowsing,
+ "sandbox": TestLinuxSandbox, "sandbox_linux_unittests": TestLinuxSandbox,
+ "sql": TestSql, "sql_unittests": TestSql,
+ "sync": TestSync, "sync_unit_tests": TestSync,
+ "sync_integration_tests": TestSyncIntegration,
+ "sync_integration": TestSyncIntegration,
+ "ui_unit": TestUIUnit, "ui_unittests": TestUIUnit,
+ "unit": TestUnit, "unit_tests": TestUnit,
+ "url": TestURL, "url_unittests": TestURL,
+ "views": TestViews, "views_unittests": TestViews,
+ "webkit": TestLayout,
+ }
+
+
+def _main():
+ parser = optparse.OptionParser("usage: %prog -b <dir> -t <test> "
+ "[-t <test> ...]")
+
+ parser.add_option("--help-tests", dest="help_tests", action="store_true",
+ default=False, help="List all available tests")
+ parser.add_option("-b", "--build-dir",
+ help="the location of the compiler output")
+ parser.add_option("--target", help="Debug or Release")
+ parser.add_option("-t", "--test", action="append", default=[],
+ help="which test to run, supports test:gtest_filter format "
+ "as well.")
+ parser.add_option("--baseline", action="store_true", default=False,
+ help="generate baseline data instead of validating")
+ parser.add_option("--gtest_filter",
+ help="additional arguments to --gtest_filter")
+ parser.add_option("--gtest_repeat", help="argument for --gtest_repeat")
+ parser.add_option("--gtest_shuffle", action="store_true", default=False,
+ help="Randomize tests' orders on every iteration.")
+ parser.add_option("-v", "--verbose", action="store_true", default=False,
+ help="verbose output - enable debug log messages")
+ parser.add_option("--tool", dest="valgrind_tool", default="memcheck",
+ help="specify a valgrind tool to run the tests under")
+ parser.add_option("--tool_flags", dest="valgrind_tool_flags", default="",
+ help="specify custom flags for the selected valgrind tool")
+ parser.add_option("--keep_logs", action="store_true", default=False,
+ help="store memory tool logs in the <tool>.logs directory "
+ "instead of /tmp.\nThis can be useful for tool "
+ "developers/maintainers.\nPlease note that the <tool>"
+ ".logs directory will be clobbered on tool startup.")
+ parser.add_option("-n", "--num_tests", type="int",
+ default=ChromeTests.LAYOUT_TESTS_DEFAULT_CHUNK_SIZE,
+ help="for layout tests: # of subtests per run. 0 for all.")
+ # TODO(thestig) Remove this if we can.
+ parser.add_option("--gtest_color", dest="gtest_color", default="no",
+ help="dummy compatibility flag for sharding_supervisor.")
+ parser.add_option("--brave-new-test-launcher", action="store_true",
+ help="run the tests with --brave-new-test-launcher")
+ parser.add_option("--test-launcher-bot-mode", action="store_true",
+ help="run the tests with --test-launcher-bot-mode")
+
+ options, args = parser.parse_args()
+
+ # Bake target into build_dir.
+ if options.target and options.build_dir:
+ assert (options.target !=
+ os.path.basename(os.path.dirname(options.build_dir)))
+ options.build_dir = os.path.join(os.path.abspath(options.build_dir),
+ options.target)
+
+ if options.verbose:
+ logging_utils.config_root(logging.DEBUG)
+ else:
+ logging_utils.config_root()
+
+ if options.help_tests:
+ ChromeTests.ShowTests()
+ return 0
+
+ if not options.test:
+ parser.error("--test not specified")
+
+ if len(options.test) != 1 and options.gtest_filter:
+ parser.error("--gtest_filter and multiple tests don't make sense together")
+
+ for t in options.test:
+ tests = ChromeTests(options, args, t)
+ ret = tests.Run()
+ if ret: return ret
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(_main())
diff --git a/tools/valgrind/chrome_tests.sh b/tools/valgrind/chrome_tests.sh
new file mode 100755
index 0000000..df5e8e7
--- /dev/null
+++ b/tools/valgrind/chrome_tests.sh
@@ -0,0 +1,122 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Set up some paths and re-direct the arguments to chrome_tests.py
+
+export THISDIR=`dirname $0`
+ARGV_COPY="$@"
+
+# We need to set CHROME_VALGRIND iff using Memcheck or TSan-Valgrind:
+# tools/valgrind/chrome_tests.sh --tool memcheck
+# or
+# tools/valgrind/chrome_tests.sh --tool=memcheck
+# (same for "--tool=tsan")
+tool="memcheck" # Default to memcheck.
+while (( "$#" ))
+do
+ if [[ "$1" == "--tool" ]]
+ then
+ tool="$2"
+ shift
+ elif [[ "$1" =~ --tool=(.*) ]]
+ then
+ tool="${BASH_REMATCH[1]}"
+ fi
+ shift
+done
+
+NEEDS_VALGRIND=0
+NEEDS_DRMEMORY=0
+
+case "$tool" in
+ "memcheck")
+ NEEDS_VALGRIND=1
+ ;;
+ "tsan" | "tsan_rv")
+    if [[ "`uname -s`" == CYGWIN* ]]
+ then
+ NEEDS_PIN=1
+ else
+ NEEDS_VALGRIND=1
+ fi
+ ;;
+ "drmemory" | "drmemory_light" | "drmemory_full" | "drmemory_pattern")
+ NEEDS_DRMEMORY=1
+ ;;
+esac
+
+if [ "$NEEDS_VALGRIND" == "1" ]
+then
+ export CHROME_VALGRIND=`sh $THISDIR/locate_valgrind.sh`
+ if [ "$CHROME_VALGRIND" = "" ]
+ then
+ # locate_valgrind.sh failed
+ exit 1
+ fi
+ echo "Using valgrind binaries from ${CHROME_VALGRIND}"
+
+ PATH="${CHROME_VALGRIND}/bin:$PATH"
+ # We need to set these variables to override default lib paths hard-coded into
+ # Valgrind binary.
+ export VALGRIND_LIB="$CHROME_VALGRIND/lib/valgrind"
+ export VALGRIND_LIB_INNER="$CHROME_VALGRIND/lib/valgrind"
+
+ # Clean up some /tmp directories that might be stale due to interrupted
+ # chrome_tests.py execution.
+ # FYI:
+ # -mtime +1 <- only print files modified more than 24h ago,
+ # -print0/-0 are needed to handle possible newlines in the filenames.
+ echo "Cleanup /tmp from Valgrind stuff"
+ find /tmp -maxdepth 1 \(\
+ -name "vgdb-pipe-*" -or -name "vg_logs_*" -or -name "valgrind.*" \
+ \) -mtime +1 -print0 | xargs -0 rm -rf
+fi
+
+if [ "$NEEDS_DRMEMORY" == "1" ]
+then
+ if [ -z "$DRMEMORY_COMMAND" ]
+ then
+ DRMEMORY_PATH="$THISDIR/../../third_party/drmemory"
+ DRMEMORY_SFX="$DRMEMORY_PATH/drmemory-windows-sfx.exe"
+ if [ ! -f "$DRMEMORY_SFX" ]
+ then
+ echo "Can't find Dr. Memory executables."
+ echo "See http://www.chromium.org/developers/how-tos/using-valgrind/dr-memory"
+ echo "for the instructions on how to get them."
+ exit 1
+ fi
+
+ chmod +x "$DRMEMORY_SFX" # Cygwin won't run it without +x.
+ "$DRMEMORY_SFX" -o"$DRMEMORY_PATH/unpacked" -y
+ export DRMEMORY_COMMAND="$DRMEMORY_PATH/unpacked/bin/drmemory.exe"
+ fi
+fi
+
+if [ "$NEEDS_PIN" == "1" ]
+then
+ if [ -z "$PIN_COMMAND" ]
+ then
+ # Set up PIN_COMMAND to invoke TSan.
+ TSAN_PATH="$THISDIR/../../third_party/tsan"
+ TSAN_SFX="$TSAN_PATH/tsan-x86-windows-sfx.exe"
+ echo "$TSAN_SFX"
+    if [ ! -f "$TSAN_SFX" ]
+ then
+ echo "Can't find ThreadSanitizer executables."
+ echo "See http://www.chromium.org/developers/how-tos/using-valgrind/threadsanitizer/threadsanitizer-on-windows"
+ echo "for the instructions on how to get them."
+ exit 1
+ fi
+
+ chmod +x "$TSAN_SFX" # Cygwin won't run it without +x.
+ "$TSAN_SFX" -o"$TSAN_PATH"/unpacked -y
+ export PIN_COMMAND="$TSAN_PATH/unpacked/tsan-x86-windows/tsan.bat"
+ fi
+fi
+
+
+PYTHONPATH=$THISDIR/../python/google python \
+ "$THISDIR/chrome_tests.py" $ARGV_COPY
diff --git a/tools/valgrind/common.py b/tools/valgrind/common.py
new file mode 100644
index 0000000..7e163e3
--- /dev/null
+++ b/tools/valgrind/common.py
@@ -0,0 +1,252 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import platform
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+class NotImplementedError(Exception):
+ pass
+
+
+class TimeoutError(Exception):
+ pass
+
+
+def RunSubprocessInBackground(proc):
+ """Runs a subprocess in the background. Returns a handle to the process."""
+ logging.info("running %s in the background" % " ".join(proc))
+ return subprocess.Popen(proc)
+
+
+def RunSubprocess(proc, timeout=0):
+ """ Runs a subprocess, until it finishes or |timeout| is exceeded and the
+ process is killed with taskkill. A |timeout| <= 0 means no timeout.
+
+ Args:
+ proc: list of process components (exe + args)
+ timeout: how long to wait before killing, <= 0 means wait forever
+ """
+
+ logging.info("running %s, timeout %d sec" % (" ".join(proc), timeout))
+ sys.stdout.flush()
+ sys.stderr.flush()
+
+ # Manually read and print out stdout and stderr.
+  # By default, the subprocess is supposed to inherit these from its parent;
+  # however, when run under buildbot, it seems unable to read data from a
+ # grandchild process, so we have to read the child and print the data as if
+ # it came from us for buildbot to read it. We're not sure why this is
+ # necessary.
+ # TODO(erikkay): should we buffer stderr and stdout separately?
+ p = subprocess.Popen(proc, universal_newlines=True,
+ bufsize=0, # unbuffered
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+ logging.info("started subprocess")
+
+ did_timeout = False
+ if timeout > 0:
+ wait_until = time.time() + timeout
+ while p.poll() is None and not did_timeout:
+ # Have to use readline rather than readlines() or "for line in p.stdout:",
+ # otherwise we get buffered even with bufsize=0.
+ line = p.stdout.readline()
+ while line and not did_timeout:
+ sys.stdout.write(line)
+ sys.stdout.flush()
+ line = p.stdout.readline()
+ if timeout > 0:
+ did_timeout = time.time() > wait_until
+
+ if did_timeout:
+ logging.info("process timed out")
+ else:
+ logging.info("process ended, did not time out")
+
+ if did_timeout:
+ if IsWindows():
+ subprocess.call(["taskkill", "/T", "/F", "/PID", str(p.pid)])
+ else:
+ # Does this kill all children, too?
+ os.kill(p.pid, signal.SIGINT)
+ logging.error("KILLED %d" % p.pid)
+ # Give the process a chance to actually die before continuing
+ # so that cleanup can happen safely.
+ time.sleep(1.0)
+ logging.error("TIMEOUT waiting for %s" % proc[0])
+ raise TimeoutError(proc[0])
+ else:
+ for line in p.stdout:
+ sys.stdout.write(line)
+ if not IsMac(): # stdout flush fails on Mac
+ logging.info("flushing stdout")
+ sys.stdout.flush()
+
+ logging.info("collecting result code")
+ result = p.poll()
+ if result:
+ logging.error("%s exited with non-zero result code %d" % (proc[0], result))
+ return result
+
+
+def IsLinux():
+ return sys.platform.startswith('linux')
+
+
+def IsMac():
+ return sys.platform.startswith('darwin')
+
+
+def IsWindows():
+ return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def WindowsVersionName():
+ """Returns the name of the Windows version if it is known, or None.
+
+ Possible return values are: xp, vista, 7, 8, or None
+ """
+ if sys.platform == 'cygwin':
+ # Windows version number is hiding in system name. Looks like:
+ # CYGWIN_NT-6.1-WOW64
+ try:
+ version_str = platform.uname()[0].split('-')[1]
+ except:
+ return None
+ elif sys.platform.startswith('win'):
+ # Normal Windows version string. Mine: 6.1.7601
+ version_str = platform.version()
+ else:
+ return None
+
+ parts = version_str.split('.')
+ try:
+ major = int(parts[0])
+ minor = int(parts[1])
+ except:
+ return None # Can't parse, unknown version.
+
+ if major == 5:
+ return 'xp'
+ elif major == 6 and minor == 0:
+ return 'vista'
+ elif major == 6 and minor == 1:
+ return '7'
+ elif major == 6 and minor == 2:
+ return '8' # Future proof. ;)
+ return None
+
+
+def PlatformNames():
+ """Return an array of string to be used in paths for the platform
+ (e.g. suppressions, gtest filters, ignore files etc.)
+ The first element of the array describes the 'main' platform
+ """
+ if IsLinux():
+ return ['linux']
+ if IsMac():
+ return ['mac']
+ if IsWindows():
+ names = ['win32']
+ version_name = WindowsVersionName()
+ if version_name is not None:
+ names.append('win-%s' % version_name)
+ return names
+ raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+
+def PutEnvAndLog(env_name, env_value):
+ os.putenv(env_name, env_value)
+ logging.info('export %s=%s', env_name, env_value)
+
+def BoringCallers(mangled, use_re_wildcards):
+ """Return a list of 'boring' function names (optinally mangled)
+ with */? wildcards (optionally .*/.).
+ Boring = we drop off the bottom of stack traces below such functions.
+ """
+
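+  # For example, with use_re_wildcards=True an entry such as "RunnableMethod*"
+  # is returned as "RunnableMethod.*".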
+ need_mangling = [
+ # Don't show our testing framework:
+ ("testing::Test::Run", "_ZN7testing4Test3RunEv"),
+ ("testing::TestInfo::Run", "_ZN7testing8TestInfo3RunEv"),
+ ("testing::internal::Handle*ExceptionsInMethodIfSupported*",
+ "_ZN7testing8internal3?Handle*ExceptionsInMethodIfSupported*"),
+
+ # Depend on scheduling:
+ ("MessageLoop::Run", "_ZN11MessageLoop3RunEv"),
+ ("MessageLoop::RunTask", "_ZN11MessageLoop7RunTask*"),
+ ("RunnableMethod*", "_ZN14RunnableMethod*"),
+ ("DispatchToMethod*", "_Z*16DispatchToMethod*"),
+ ("base::internal::Invoker*::DoInvoke*",
+ "_ZN4base8internal8Invoker*DoInvoke*"), # Invoker{1,2,3}
+ ("base::internal::RunnableAdapter*::Run*",
+ "_ZN4base8internal15RunnableAdapter*Run*"),
+ ]
+
+ ret = []
+ for pair in need_mangling:
+ ret.append(pair[1 if mangled else 0])
+
+ ret += [
+ # Also don't show the internals of libc/pthread.
+ "start_thread",
+ "main",
+ "BaseThreadInitThunk",
+ ]
+
+ if use_re_wildcards:
+ for i in range(0, len(ret)):
+ ret[i] = ret[i].replace('*', '.*').replace('?', '.')
+
+ return ret
+
+def NormalizeWindowsPath(path):
+ """If we're using Cygwin Python, turn the path into a Windows path.
+
+ Don't turn forward slashes into backslashes for easier copy-pasting and
+ escaping.
+
+ TODO(rnk): If we ever want to cut out the subprocess invocation, we can use
+ _winreg to get the root Cygwin directory from the registry key:
+ HKEY_LOCAL_MACHINE\SOFTWARE\Cygwin\setup\rootdir.
+ """
+ if sys.platform.startswith("cygwin"):
+ p = subprocess.Popen(["cygpath", "-m", path],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (out, err) = p.communicate()
+ if err:
+ logging.warning("WARNING: cygpath error: %s", err)
+ return out.strip()
+ else:
+ return path
+
+############################
+# Common output format code
+
+def PrintUsedSuppressionsList(suppcounts):
+ """ Prints out the list of used suppressions in a format common to all the
+ memory tools. If the list is empty, prints nothing and returns False,
+ otherwise True.
+
+ suppcounts: a dictionary of used suppression counts,
+ Key -> name, Value -> count.
+ """
+ if not suppcounts:
+ return False
+
+ print "-----------------------------------------------------"
+ print "Suppressions used:"
+ print " count name"
+ for (name, count) in sorted(suppcounts.items(), key=lambda (k,v): (v,k)):
+ print "%7d %s" % (count, name)
+ print "-----------------------------------------------------"
+ sys.stdout.flush()
+ return True
diff --git a/tools/valgrind/drmemory.bat b/tools/valgrind/drmemory.bat
new file mode 100755
index 0000000..46d5a4f
--- /dev/null
+++ b/tools/valgrind/drmemory.bat
@@ -0,0 +1,5 @@
+@echo off
+:: Copyright (c) 2011 The Chromium Authors. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+%~dp0\chrome_tests.bat -t cmdline --tool drmemory %*
diff --git a/tools/valgrind/drmemory/OWNERS b/tools/valgrind/drmemory/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/tools/valgrind/drmemory/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/tools/valgrind/drmemory/PRESUBMIT.py b/tools/valgrind/drmemory/PRESUBMIT.py
new file mode 100644
index 0000000..0ff7618
--- /dev/null
+++ b/tools/valgrind/drmemory/PRESUBMIT.py
@@ -0,0 +1,39 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into gcl.
+"""
+
+
+def CheckChange(input_api, output_api):
+ """Checks the DrMemory suppression files for bad suppressions."""
+
+ # TODO(timurrrr): find out how to do relative imports
+ # and remove this ugly hack. Also, the CheckChange function won't be needed.
+ tools_vg_path = input_api.os_path.join(input_api.PresubmitLocalPath(), '..')
+ import sys
+ old_path = sys.path
+ try:
+ sys.path = sys.path + [tools_vg_path]
+ import suppressions
+ return suppressions.PresubmitCheck(input_api, output_api)
+ finally:
+ sys.path = old_path
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+
+def GetPreferredTryMasters(project, change):
+ return {
+ 'tryserver.chromium.win': {
+ 'win_drmemory': set(['defaulttests']),
+ }
+ }
diff --git a/tools/valgrind/drmemory/suppressions.txt b/tools/valgrind/drmemory/suppressions.txt
new file mode 100644
index 0000000..eec7064
--- /dev/null
+++ b/tools/valgrind/drmemory/suppressions.txt
@@ -0,0 +1,663 @@
+# This file contains suppressions for the Dr.Memory tool, see
+# http://dev.chromium.org/developers/how-tos/using-drmemory
+#
+# This file contains suppressions for the DrMemory reports happening
+# in the 'light' mode (a.k.a. drmemory_light) as well as in the 'full' mode.
+# Please use suppressions_full.txt for all the reports that can happen only
+# in the full mode (drmemory_full).
+
+############################
+# Known reports on the third party we have no control over.
+
+# Reports from Sophos antivirus
+UNADDRESSABLE ACCESS
+name=Sophos UNADDR
+...
+sophos*.dll!*
+
+UNINITIALIZED READ
+name=Sophos UNINIT
+...
+sophos*.dll!*
+
+LEAK
+name=Sophos LEAK
+...
+sophos*.dll!*
+
+# Reports from Microsoft RDP ActiveX control (mstscax.dll)
+
+GDI USAGE ERROR
+name=crbug.com/177832: mstscax.dll causes "GDI USAGE ERROR" errors.
+...
+mstscax.dll!*
+
+UNADDRESSABLE ACCESS
+name=crbug.com/177832: mstscax.dll causes "UNADDRESSABLE ACCESS" errors.
+...
+mstscax.dll!*
+
+############################
+# Suppress some false reports due to bugs in Dr.Memory like wrong analysis
+# assumptions or unhandled syscalls
+
+# Please note: the following suppressions were written in the absence of
+# private symbols, so they may need to be updated when we switch to auto-loading PDBs.
+
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=12 UNADDR
+...
+SHELL32.dll!SHFileOperation*
+
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=40 UNADDR
+...
+WINSPOOL.DRV!*
+
+INVALID HEAP ARGUMENT
+name=http://code.google.com/p/drmemory/issues/detail?id=40 INVALID HEAP
+...
+WINSPOOL.DRV!*
+
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=59
+...
+*!SetEnvironmentVariable*
+
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=68 (UNADDR 1)
+...
+MSWSOCK.dll!WSPStartup
+
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=68 (UNADDR 2)
+...
+ntdll.dll!RtlValidateUnicodeString
+
+############################
+# TODO(timurrrr): investigate these
+UNADDRESSABLE ACCESS
+name=TODO SHParseDisplayName
+...
+*!SHParseDisplayName
+
+UNADDRESSABLE ACCESS
+name=TODO GetCanonicalPathInfo
+...
+*!GetCanonicalPathInfo*
+
+UNADDRESSABLE ACCESS
+name=TODO CreateDC
+...
+GDI32.dll!CreateDC*
+
+# This one looks interesting
+INVALID HEAP ARGUMENT
+name=TODO ExitProcess
+...
+KERNEL32.dll!ExitProcess
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/103365 (a)
+ppapi_tests.dll!*
+...
+ppapi_tests.dll!*
+*!base::internal::RunnableAdapter<*>::Run
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/103365 (b)
+ppapi_tests.dll!*
+...
+ppapi_tests.dll!*
+*!PP_RunCompletionCallback
+...
+*!base::internal::RunnableAdapter<*>::Run
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/107567 intentional mismatch in _DebugHeapDelete, no frame
+*!std::numpunct<*>::_Tidy
+*!std::numpunct<*>::~numpunct<*>
+
+# TODO(rbultje): Investigate if code fix is required instead.
+WARNING
+name=http://crbug.com/223255 - prefetches in vp8
+instruction=prefetch*
+ffmpegsumo.dll!ff_prefetch_mmxext
+
+############################
+# Intentional errors in Chromium tests (ToolsSanityTests)
+LEAK
+name=sanity test 01 (memory leak)
+base_unittests.exe!operator new
+base_unittests.exe!operator new[]
+base_unittests.exe!base::ToolsSanityTest_MemoryLeak_Test::TestBody
+
+# "..." is needed due to http://code.google.com/p/drmemory/issues/detail?id=666
+UNADDRESSABLE ACCESS
+name=sanity test 02 (malloc/read left)
+base_unittests.exe!*ReadValueOutOfArrayBoundsLeft
+...
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 03 (malloc/read right)
+base_unittests.exe!*ReadValueOutOfArrayBoundsRight
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 04 (malloc/write left)
+base_unittests.exe!*WriteValueOutOfArrayBoundsLeft
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 05 (malloc/write right)
+base_unittests.exe!*WriteValueOutOfArrayBoundsRight
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+# "..." is needed due to http://code.google.com/p/drmemory/issues/detail?id=666
+UNADDRESSABLE ACCESS
+name=sanity test 06 (new/read left)
+base_unittests.exe!*ReadValueOutOfArrayBoundsLeft
+...
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 07 (new/read right)
+base_unittests.exe!*ReadValueOutOfArrayBoundsRight
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 08 (new/write left)
+base_unittests.exe!*WriteValueOutOfArrayBoundsLeft
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 09 (new/write right)
+base_unittests.exe!*WriteValueOutOfArrayBoundsRight
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 10 (write after free)
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 11 (write after delete)
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+INVALID HEAP ARGUMENT
+name=sanity test 12 (array deleted without [])
+base_unittests.exe!base::ToolsSanityTest_ArrayDeletedWithoutBraces_Test::TestBody
+
+INVALID HEAP ARGUMENT
+name=sanity test 13 (single element deleted with [])
+base_unittests.exe!base::ToolsSanityTest_SingleElementDeletedWithBraces_Test::TestBody
+
+UNINITIALIZED READ
+name=sanity test 14 (malloc/read uninit)
+base_unittests.exe!*ReadUninitializedValue
+...
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNINITIALIZED READ
+name=sanity test 15 (new/read uninit)
+base_unittests.exe!*ReadUninitializedValue
+...
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=AboutHandler::AboutCrash deliberate crash
+# TODO(bruening): switch to annotation once we have support for that
+chrome.dll!AboutHandler::AboutCrash
+
+UNADDRESSABLE ACCESS
+name=NPAPITesterBase.NoHangIfInitCrashes deliberate crash
+# The function is small, so there is little risk of a false negative in the rest of it.
+# TODO(bruening): switch to annotation once we have support for that
+npapi_test_plugin.dll!NPAPIClient::PluginClient::Initialize
+
+# Deliberate NULL deref to crash the child process
+UNADDRESSABLE ACCESS
+name=CrashingChildProcess deliberate crash
+*!CrashingChildProcess
+
+UNADDRESSABLE ACCESS
+name=::Crasher::Run deliberate crash
+*!base::`anonymous namespace'::Crasher::Run
+
+############################
+# Benign issues in Chromium
+
+WARNING
+name=http://crbug.com/72463 - prefetches in generated MemCopy
+instruction=prefetch*
+<not in a module>
+chrome.dll!v8::internal::CopyChars*
+
+WARNING
+name=prefetches in NVD3DUM.dll
+instruction=prefetch*
+NVD3DUM.dll!*
+
+WARNING
+name=prefetches in igdumd32.dll
+instruction=prefetch*
+igdumd32.dll!*
+
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=582 bizarre cl-generated read-beyond-TOS
+instruction=mov 0xfffffffc(%esp) -> %eax
+chrome.dll!blink::RenderStyle::resetBorder*
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/101717 (1)
+*!sandbox::PolicyBase::~PolicyBase
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/101717 (2)
+*!scoped_ptr<>::~scoped_ptr<>
+*!sandbox::GetHandleName
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/101717 (3)
+*!scoped_ptr<>::~scoped_ptr<>
+*!sandbox::GetPathFromHandle
+
+GDI USAGE ERROR
+name=http://code.google.com/p/drmemory/issues/detail?id=899 deleting bitmap which is probably safe
+system call NtGdiDeleteObjectApp
+*!skia::`anonymous namespace'::Bitmap::~Bitmap
+*!skia::`anonymous namespace'::Bitmap::`scalar deleting destructor'
+
+############################
+# Real issues in Chromium
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/88213
+*!base::win::ObjectWatcher::StopWatching
+*!base::win::ObjectWatcher::WillDestroyCurrentMessageLoop
+*!MessageLoop::~MessageLoop
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/96010
+*!TestingProfile::FinishInit
+*!TestingProfile::TestingProfile
+*!BrowserAboutHandlerTest_WillHandleBrowserAboutURL_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/106522
+npapi_test_plugin.dll!NPAPIClient::PluginTest::id
+npapi_test_plugin.dll!NPAPIClient::ExecuteGetJavascriptUrlTest::TimerProc
+
+GDI USAGE ERROR
+name=http://crbug.com/109963 c
+system call NtGdiDeleteObjectApp
+GDI32.dll!DeleteDC
+content.dll!*
+
+GDI USAGE ERROR
+name=http://crbug.com/109963 d
+system call NtGdiDeleteObjectApp
+GDI32.dll!DeleteDC
+*!base::internal::RunnableAdapter*
+
+# GDI usage errors in 3rd-party components
+GDI USAGE ERROR
+name=http://crbug.com/119552 a
+system call NtGdiDeleteObjectApp
+...
+*!OmniboxViewWin::*
+
+GDI USAGE ERROR
+name=http://crbug.com/119552 b
+system call Nt*
+...
+*!ATL::*
+
+GDI USAGE ERROR
+name=http://crbug.com/119552 c
+# Optional gdi32.dll frame followed by user32.dll.
+# TODO(bruening): once we have
+# http://code.google.com/p/drmemory/issues/detail?id=846
+# I would use "gdi32.dll!...\nuser32.dll!*"
+*32.dll!*
+...
+shell32.dll!SHGetFileInfoW
+*!IconLoader::ReadIcon
+
+GDI USAGE ERROR
+name=http://crbug.com/119552 d
+system call NtGdiDeleteObjectApp
+gdi32.dll!DeleteObject
+riched20.dll!*
+riched20.dll!*
+riched20.dll!*
+
+GDI USAGE ERROR
+name=http://crbug.com/120157
+# "ReleaseDC called from different thread than GetDC"
+system call NtUserCallOneParam.RELEASEDC
+*!*FontCache::CacheElement::~CacheElement
+
+GDI USAGE ERROR
+name=http://crbug.com/158090
+# "DC created by one thread and used by another"
+...
+content.dll!content::*::FontCache::PreCacheFont
+content.dll!content::FontCacheDispatcher::OnPreCacheFont
+content.dll!DispatchToMethod<>
+
+GDI USAGE ERROR
+name=http://crbug.com/158090 c#4
+# ReleaseDC for DC called from different thread than the thread that called GetDC
+system call NtUserCallOneParam.RELEASEDC
+ui.dll!gfx::ReadColorProfile
+ui.dll!gfx::GetColorProfile
+content.dll!content::RenderMessageFilter::OnGetMonitorColorProfile
+content.dll!DispatchToMethod*
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/158350
+# allocated with operator new[], freed with operator delete
+*!*
+*!*
+*!*
+*!*
+*!*
+content.dll!*
+content.dll!*
+content.dll!*
+content.dll!*
+content.dll!*
+*!*
+*!*
+*!*
+*!*
+*!*
+KERNEL32.dll!*
+ntdll.dll!*
+ntdll.dll!*
+
+WARNING
+name=Security test (new overflow)
+MSVCR100D.dll!operator new
+*!operator new
+*!operator new[]
+*!`anonymous namespace'::SecurityTest_NewOverflow_Test::TestBody
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+WARNING
+name=Security test (calloc overflow)
+*!`anonymous namespace'::CallocReturnsNull
+*!`anonymous namespace'::SecurityTest_CallocOverflow_Test::TestBody
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+GDI USAGE ERROR
+name=http://crbug.com/234484
+# "DC created by one thread and used by another"
+...
+*!chrome::`anonymous namespace'::SetOverlayIcon
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/262088
+drmemorylib.dll!av_dup_packet
+msvcrt.dll!wcsrchr
+ntdll.dll!RtlIsCurrentThreadAttachExempt
+ntdll.dll!LdrShutdownThread
+ntdll.dll!RtlExitUserThread
+
+GDI USAGE ERROR
+name=http://crbug.com/266484
+skia.dll!HDCOffscreen::draw
+...
+skia.dll!SkScalerContext::getImage
+skia.dll!SkGlyphCache::findImage
+skia.dll!D1G_RectClip
+skia.dll!SkDraw::drawText
+
+HANDLE LEAK
+name=http://crbug.com/346842
+system call NtGdiCreateDIBSection
+*!CreateDIBSection
+*!HDCOffscreen::draw
+*!SkScalerContext_GDI::generateImage
+*!SkScalerContext::getImage
+*!SkGlyphCache::findImage
+*!D1G*RectClip
+*!SkDraw::drawPosText
+*!SkBitmapDevice::drawPosText
+
+HANDLE LEAK
+name=http://crbug.com/346993
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle*
+base.dll!base::`anonymous namespace'::ThreadFunc
+KERNEL32.dll!BaseThreadInitThunk
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/42043-uninit
+...
+QuickTime.qts!*
+
+GDI USAGE ERROR
+name=http://crbug.com/42043-gdi
+...
+QuickTime.qts!*
+
+UNADDRESSABLE ACCESS
+name=IntentionalCrash
+*!content::*::*Crash*
+*!content::*::MaybeHandleDebugURL
+
+HANDLE LEAK
+name=http://crbug.com/371348
+system call NtCreateSection
+KERNELBASE.dll!CreateFileMappingW
+base.dll!base::SharedMemory::Create
+base.dll!base::SharedMemory::CreateAndMapAnonymous
+content.dll!content::ChildThread::AllocateSharedMemory
+content.dll!content::ChildSharedBitmapManager::AllocateSharedBitmap
+cc.dll!cc::ResourceProvider::CreateBitmap
+
+HANDLE LEAK
+name=http://crbug.com/371357
+system call NtCreateEvent
+KERNELBASE.dll!CreateEventExW
+KERNELBASE.dll!CreateEventW
+
+HANDLE LEAK
+name=http://crbug.com/371368
+system call NtCreateNamedPipeFile
+KERNELBASE.dll!CreateNamedPipeW
+ipc.dll!IPC::Channel::ChannelImpl::CreatePipe
+ipc.dll!IPC::Channel::ChannelImpl::ChannelImpl
+ipc.dll!IPC::Channel::Channel
+ipc.dll!IPC::ChannelProxy::Context::CreateChannel
+
+HANDLE LEAK
+name=http://crbug.com/371942
+system call NtCreateThreadEx
+KERNELBASE.dll!CreateRemoteThreadEx
+KERNEL32.dll!CreateThread
+
+HANDLE LEAK
+name=http://crbug.com/371946
+system call NtUserWindowFromPoint
+content.dll!content::LegacyRenderWidgetHostHWND::OnMouseLeave
+content.dll!content::LegacyRenderWidgetHostHWND::_ProcessWindowMessage
+content.dll!content::LegacyRenderWidgetHostHWND::ProcessWindowMessage
+content.dll!ATL::CWindowImplBaseT<>::WindowProc
+USER32.dll!gapfnScSendMessage
+USER32.dll!GetThreadDesktop
+USER32.dll!CharPrevW
+USER32.dll!DispatchMessageW
+base.dll!base::MessagePumpForUI::ProcessMessageHelper
+base.dll!base::MessagePumpForUI::ProcessNextWindowsMessage
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/372177
+NPCTRL.dll!DllGetClassObject
+NPCTRL.dll!DllGetClassObject
+NPCTRL.dll!DllGetClassObject
+NPCTRL.dll!DllGetClassObject
+
+HANDLE LEAK
+name=http://crbug.com/373333
+system call NtGdiCreateCompatibleDC
+*!CreateCompatibleDC
+*!SkScalerContext_GDI::SkScalerContext_GDI
+*!LogFontTypeface::onCreateScalerContext
+*!SkTypeface::createScalerContext
+*!SkGlyphCache::VisitCache
+...
+*!SkPaint::descriptorProc
+...
+*!blink::RenderBlockFlow::layoutBlockFlow
+*!blink::RenderBlockFlow::layoutBlock
+*!blink::RenderBlock::layout
+
+HANDLE LEAK
+name=https://code.google.com/p/drmemory/issues/detail?id=1545
+system call NtGdiCreateCompatibleDC
+GDI32.dll!CreateCompatibleDC
+skia.dll!LogFontTypeface::onGetTableData
+
+HANDLE LEAK
+name=http://crbug.com/379000
+system call NtCreate*
+...
+*!disk_cache::MappedFile::Init
+*!disk_cache::BlockFiles::OpenBlockFile
+*!disk_cache::BlockFiles::Init
+*!disk_cache::BackendImpl::SyncInit
+*!disk_cache::BackendIO::ExecuteBackendOperation
+*!base::internal::Invoker<>::Run
+*!base::MessageLoop::RunTask
+*!base::MessageLoop::DeferOrRunPendingTask
+*!base::MessageLoop::DoWork
+*!base::MessagePumpForIO::DoRunLoop
+*!base::MessagePumpWin::Run
+*!base::MessageLoop::RunHandler
+*!base::Thread::Run
+*!content::BrowserThreadImpl::CacheThreadRun
+*!content::BrowserThreadImpl::Run
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/379204
+...
+*!WTF::MessageQueue<>::waitForMessageWithTimeout
+*!blink::WorkerRunLoop::run
+*!blink::WorkerRunLoop::run
+*!blink::WorkerThread::workerThread
+*!WTF::threadEntryPoint
+
+GDI USAGE ERROR
+name=379774
+system call NtUserCallOneParam.RELEASEDC
+USER32.dll!ReleaseDC
+*!std::_Tree<>::_Erase
+*!std::_Tree<>::erase
+*!DefaultSingletonTraits<>::Delete
+*!Singleton<>::OnExit
+*!base::AtExitManager::ProcessCallbacksNow
+*!base::AtExitManager::~AtExitManager
+*!base::TestSuite::~TestSuite
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/382784
+*!content::PepperMediaStreamAudioTrackHost::AudioSink::OnData
+*!content::MediaStreamAudioSinkOwner::OnData
+*!content::WebRtcLocalAudioTrack::Capture
+*!content::WebRtcAudioCapturer::Capture
+*!media::AudioInputDevice::AudioThreadCallback::Process
+*!media::AudioDeviceThread::Thread::Run
+*!media::AudioDeviceThread::Thread::ThreadMain
+
+HANDLE LEAK
+name=http://crbug.com/383408
+system call NtCreateEvent
+...
+*!base::internal::LockImpl::Lock
+*!mojo::system::MessagePipe::RemoveWaiter
+*!mojo::system::MessagePipeDispatcher::RemoveWaiterImplNoLock
+*!mojo::system::Dispatcher::RemoveWaiter
+*!mojo::system::Core::WaitManyInternal
+*!mojo::system::Core::WaitMany
+*!MojoWaitMany
+
+UNADDRESSABLE ACCESS
+name=IntentionalCrashPluginTest.plugin_client.cc
+npapi_test_plugin.dll!NP_Initialize
+...
+*!content::PluginLib::NP_Initialize
+*!content::PluginThread::PluginThread
+*!content::PluginMain
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/313788
+*!InProcessBrowserTest::AddBlankTabAndShow
+*!InProcessBrowserTest::CreateBrowser
+*!*::RunTestOnMainThread
+*!InProcessBrowserTest::RunTestOnMainThreadLoop
+*!ChromeBrowserMainParts::PreMainMessageLoopRunImpl
+*!ChromeBrowserMainParts::PreMainMessageLoopRun
+
+# This suppression is deliberately general, as bugs reported in
+# v8 generated code are difficult to track down. Xref Dr. Memory issue
+# https://code.google.com/p/drmemory/issues/detail?id=1582
+UNADDRESSABLE ACCESS
+name=https://code.google.com/p/drmemory/issues/detail?id=1582
+...
+*!v8::internal::Invoke
+*!v8::internal::Execution::Call
+*!v8::Function::Call
+
+HANDLE LEAK
+name=http://crbug.com/387394
+system call NtCreateTimer
+KERNELBASE.dll!CreateWaitableTimerExW
+KERNEL32.dll!CreateWaitableTimerW
+*!rtc::Timing::Timing
+...
+*!content::RenderProcessHostImpl::CreateMessageFilters
+*!content::RenderProcessHostImpl::Init
+*!content::RenderViewHostImpl::CreateRenderView
+
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/dynamorio/issues/detail?id=1443
+dynamorio.dll!*
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/400495
+aura.dll!aura::Window::Contains
+aura.dll!aura::WindowEventDispatcher::OnWindowHidden
+aura.dll!aura::WindowEventDispatcher::OnPostNotifiedWindowDestroying
+aura.dll!aura::Window::~Window
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/400511
+content.dll!content::WebThreadBase::TaskObserverAdapter::WillProcessTask
+base.dll!base::MessageLoop::RunTask
+base.dll!base::Thread::StopSoon
+base.dll!base::MessageLoop::DeferOrRunPendingTask
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/414675
+blink_web.dll!blink::toCoreFrame
+blink_web.dll!blink::RemoteFrameClient::firstChild
+blink_web.dll!blink::WebRemoteFrameImpl::~WebRemoteFrameImpl
+blink_web.dll!blink::WebRemoteFrameImpl::close
+content.dll!content::RenderFrameProxy::~RenderFrameProxy
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/420013
+content.dll!content::RenderFrameImpl::OnSwapOut
+content.dll!FrameMsg_SwapOut::Dispatch<>
diff --git a/tools/valgrind/drmemory/suppressions_full.txt b/tools/valgrind/drmemory/suppressions_full.txt
new file mode 100644
index 0000000..c3c7f8e
--- /dev/null
+++ b/tools/valgrind/drmemory/suppressions_full.txt
@@ -0,0 +1,1875 @@
+# This file contains suppressions for the Dr.Memory tool, see
+# http://dev.chromium.org/developers/how-tos/using-drmemory
+#
+# This file should contain suppressions only for the reports happening
+# in the 'full' mode (drmemory_full).
+# For the reports that can happen in the light mode (a.k.a. drmemory_light),
+# please use suppressions.txt instead.
+
+###############################################################
+# Known reports on the third party we have no control over.
+
+UNINITIALIZED READ
+name=http://crbug.com/116277
+...
+*!MOZ_Z_deflate
+
+# TODO(timurrrr): check if these frames change when NT_SYMBOLS are present.
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=406
+ADVAPI32.dll!WmiOpenBlock
+ADVAPI32.dll!WmiOpenBlock
+
+# Leaks inside GoogleDesktop - it injects into our processes for some reason
+LEAK
+name=GoogleDesktop LEAK
+...
+GoogleDesktopNetwork3.DLL!DllUnregisterServer
+
+# They deliberately use an uninitialized local var in the sqlite random generator.
+# Random bytes may mess up the call stack between randomByte and
+# sqlite3_* frames (http://code.google.com/p/drmemory/issues/detail?id=1514)
+# so we just look for randomByte.
+UNINITIALIZED READ
+name=sqlite3_randomness UNINIT
+*!randomByte
+
+# Intentional leak in WebKit Template Framework for ThreadData.
+LEAK
+name=intentional WTF ThreadData leak
+...
+*!WTF::wtfThreadData
+
+# Happens when winhttp returns ERROR_WINHTTP_UNABLE_TO_DOWNLOAD_SCRIPT.
+LEAK
+name=http://crbug.com/125558 a
+KERNELBASE.dll!LocalAlloc
+SECHOST.dll!...
+SECHOST.dll!NotifyServiceStatusChange
+WINHTTP.dll!...
+WINHTTP.dll!WinHttpDetectAutoProxyConfigUrl
+*!net::ProxyResolverWinHttp::GetProxyForURL
+
+# Tiny locale-related leaks in ntdll. Probably system bug.
+LEAK
+name=http://crbug.com/125558 b
+ntdll.dll!...
+ntdll.dll!*
+KERNELBASE.dll!...
+KERNELBASE.dll!GetCPInfoExW
+webio.dll!*
+webio.dll!*
+webio.dll!*
+WINHTTP.dll!...
+WINHTTP.dll!WinHttpGetIEProxyConfigForCurrentUser
+*!net::ProxyConfigServiceWin::GetCurrentProxyConfig
+
+UNINITIALIZED READ
+name=http://crbug.com/30704 #f
+libpng.dll!wk_png_write_find_filter
+libpng.dll!wk_png_write_row
+
+###############################################################
+# Suppress some false reports due to bugs in Dr.Memory like wrong analysis
+# assumptions or unhandled syscalls
+
+# Please note: the following suppressions were written in the absence of
+# private symbols, so they may need to be updated when we switch to auto-loading PDBs.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (1)
+ntdll.dll!Rtl*
+ntdll.dll!Rtl*
+ntdll.dll!RtlFindActivationContextSectionString
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (2)
+...
+SHELL32.dll!SHFileOperation*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (3)
+...
+SHELL32.dll!SHGetFolderPath*
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (4)
+...
+SHELL32.dll!SHGetFolderPath*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (5)
+...
+SHELL32.dll!SHCreateDirectory*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (6)
+...
+SHELL32.dll!ILLoadFromStream*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (7)
+...
+SHELL32.dll!ILSaveToStream*
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (8)
+...
+SHELL32.dll!SHFileOperation*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (9)
+...
+SHELL32.dll!SHGetItemFromDataObject
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (10)
+...
+SHELL32.dll!SHGetItemFromDataObject
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=12 (11)
+...
+ole32.dll!*
+SHELL32.dll!SHChangeNotifySuspendResume
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=14 (1)
+...
+*!CreateProcess*
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=14 (2)
+...
+*!CreateProcess*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=14 (3)
+...
+*!base::LaunchApp*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=17 (1)
+...
+*!CreateWindow*
+
+POSSIBLE LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=17 (2)
+GDI32.dll!*
+GDI32.dll!CreateFontIndirectExW
+GDI32.dll!CreateFontIndirectW
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=17 (3)
+KERNELBASE.dll!LocalAlloc
+...
+USER32.dll!CreateWindow*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=18 a
+...
+*!CoInitialize*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=18 b
+...
+*!CoCreateInstance*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=18 c
+...
+*!CoUninitialize*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=18 d
+...
+UxTheme.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=40 a
+...
+WINSPOOL.DRV!*
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=40 b
+...
+WINSPOOL.DRV!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=48 a
+system call NtContinue
+...
+*!*SetThreadName
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=48 b
+system call NtContinue
+*!WTF::initializeCurrentThreadInternal
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=52 a
+...
+DBGHELP.dll!SymInitialize
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=52 b
+...
+DBGHELP.dll!SymEnumSourceFiles
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=52 c
+...
+msvcrt.dll!_RTDynamicCast
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=52 bit-level fp in dbghelp
+instruction=test 0x*(%*) $0x??
+DBGHELP.dll!SymUnloadModule64
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=53
+ADVAPI32.dll!WmiMofEnumerateResourcesA
+ADVAPI32.dll!WmiMofEnumerateResourcesA
+ADVAPI32.dll!Sta*TraceW
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=58
+...
+*!_cfltcvt_l
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=60
+USP10.dll!*
+...
+USP10.dll!ScriptStringAnalyse
+
+LEAK
+IMM32.dll!ImmGetIMCCSize
+IMM32.dll!ImmLockClientImc
+IMM32.dll!ImmDisableIME
+IMM32.dll!ImmSetActiveContext
+USER32.dll!IMPSetIMEA
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=65 a
+...
+*!SystemFunction036
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=65 b
+...
+*!talk_base::CreateRandomString
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=68 a
+...
+WS2_32.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=68 b
+...
+ADVAPI32.dll!SetSecurityDescriptorDacl
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=68 c
+...
+MSWSOCK.dll!WSPStartup
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=68 d
+...
+ntdll.dll!RtlValidateUnicodeString
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=256
+*!_mtinit
+*!__tmainCRTStartup
+*!mainCRTStartup
+
+POSSIBLE LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=274 a
+...
+GDI32.dll!CreateDCW
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=274 b
+...
+GDI32.dll!CreateDCW
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=275
+...
+*!_getptd*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=276
+...
+ntdll.dll!RtlConvertUlongToLargeInteger
+ntdll.dll!RtlConvertUlongToLargeInteger
+ntdll.dll!KiUserExceptionDispatcher
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=305
+*!free
+*!free
+*!operator new
+...
+*!MiniDumpWriteDump
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=346 a
+...
+GDI32.dll!CloseEnhMetaFile
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=346 b
+GDI32.dll!SetPolyFillMode
+GDI32.dll!CreateICW
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=362
+USER32.dll!UnregisterClass*
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=382
+...
+ntdll.dll!CsrNewThread
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=397
+system call NtDeviceIoControlFile InputBuffer
+ADVAPI32.dll!ImpersonateAnonymousToken
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=407 a
+system call NtRequestWaitReplyPort
+RPCRT4.dll!I_RpcSendReceive
+RPCRT4.dll!NdrSendReceive
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=407 b
+IMM32.dll!*
+ntdll.dll!LdrInitializeThunk
+ntdll.dll!LdrShutdownThread
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=412 a
+ADVAPI32.dll!RegDeleteValue*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=412 b
+...
+ADVAPI32.dll!Crypt*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=412 c
+...
+RPCRT4.dll!NdrClientCall2
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=412 d
+RSAENH.dll!DllUnregisterServer
+...
+ADVAPI32.dll!CryptAcquireContextA
+CRYPT32.dll!CryptEnumOIDFunction
+...
+CRYPT32.dll!CertFindCertificateInStore
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=412 e
+...
+RSAENH.dll!CPGenRandom
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=412 f
+...
+CRYPT??.dll!Crypt*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=412 g
+*!replace_memcmp
+...
+*!testing::internal::CmpHelperEQ*
+...
+*!SymmetricKeyTest_ImportGeneratedKey_Test::TestBody
+
+# We get these sometimes from AesEncrypt and AesExpandKey. AesEncrypt doesn't
+# have frame pointers, and we have trouble unwinding from it. Therefore, we use
+# this broad suppression, effectively disabling uninit checks in rsaenh.dll.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=412 h
+RSAENH.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=425 a
+CLBCatQ.DLL!DestroyStgDatabase
+CLBCatQ.DLL!PostError
+CLBCatQ.DLL!PostError
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=425 b
+RPCRT4.dll!I_RpcBCacheFree
+RPCRT4.dll!I_RpcBCacheFree
+...
+RPCRT4.dll!NdrClientCall2
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=425 c
+msdmo.dll!*
+msdmo.dll!*
+DEVENUM.DLL!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=435 a
+...
+ntdll.dll!RtlSetSecurityObject
+ntdll.dll!RtlNewSecurityObjectEx
+ADVAPI32.dll!CreatePrivateObjectSecurityEx
+NTMARTA.dll!AccRewriteSetNamedRights
+
+POSSIBLE LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=435 b
+WLDAP32.dll!Ordinal325
+...
+WLDAP32.dll!Ordinal325
+ntdll.dll!LdrInitializeThunk
+ntdll.dll!LdrFindResourceDirectory_U
+ntdll.dll!RtlValidateUnicodeString
+ntdll.dll!LdrLoadDll
+KERNEL32.dll!LoadLibraryExW
+
+# mod+offs suppression because the symbolized stack makes no sense and changes
+# completely in the presence of WS2_32.dll symbols.
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=438
+<WS2_32.dll+0x260c>
+<WS2_32.dll+0x2b76>
+<WS2_32.dll+0x2c61>
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=454 a
+...
+WINMM.dll!wave*GetNumDevs
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=454 b
+...
+WINMM.dll!wave*GetNumDevs
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=466
+ntdll.dll!RtlRunOnceBeginInitialize
+ntdll.dll!RtlInitializeCriticalSectionAndSpinCount
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=473 a
+system call NtDeviceIoControlFile InputBuffer
+...
+iphlpapi.dll!GetAdaptersAddresses
+
+POSSIBLE LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=473 b
+ESENT.dll!*
+ESENT.dll!*
+ESENT.dll!*
+ntdll.dll!Ldr*Init*
+ntdll.dll!Ldr*
+ntdll.dll!*
+ntdll.dll!LdrLoadDll
+...
+iphlpapi.dll!GetPerAdapterInfo
+...
+iphlpapi.dll!GetAdaptersAddresses
+
+POSSIBLE LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=473 c
+RPCRT4.dll!*
+RPCRT4.dll!*
+...
+IPHLPAPI.DLL!GetAdaptersAddresses
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=475
+...
+ADVAPI32.dll!CryptAcquireContextA
+...
+CRYPT32.dll!CryptMsgOpenToDecode
+...
+CRYPT32.dll!CryptQueryObject
+
+# Lots of leaks from our interactions with the system certificate store. May be
+# worth reviewing our use of their API.
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 a
+KERNEL*.dll!LocalAlloc
+...
+CRYPT32.dll!CertGetCRLContextProperty
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 b
+KERNEL*.dll!LocalAlloc
+...
+CRYPT32.dll!CertAddCRLContextToStore
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 c
+KERNEL*.dll!LocalAlloc
+...
+CRYPT32.dll!CertOpenStore
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 d
+...
+CRYPT32.dll!CertOpenSystemStore?
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 e
+...
+CRYPT32.dll!CertGetCertificateChain
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 f
+...
+CRYPT32.dll!CertCompareIntegerBlob
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 g
+...
+CRYPT32.dll!CryptUnprotectData
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 h
+KERNEL*.dll!LocalAlloc
+...
+CRYPT32.dll!CertEnumCertificatesInStore
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 i
+...
+CRYPT32.dll!CryptProtectData
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=476 j
+...
+CRYPT32.dll!CryptExportPublicKeyInfoEx
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=502 a
+system call NtSecureConnectPort parameter #3
+GDI32.dll!*
+GDI32.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=502 b
+system call NtGdiEnumFonts parameter #6
+GDI32.dll!*
+GDI32.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=511 a
+RPCRT4.dll!...
+ole32.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=511 b
+ole32.dll!*
+ole32.dll!*
+ole32.dll!StringFromGUID2
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=512 a
+...
+*!browser_sync::Cryptographer::PackBootstrapToken
+*!browser_sync::Cryptographer::GetBootstrapToken
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=512 b
+...
+*!Encrypt*
+
+# TODO(bruening): remove these once we have v8 bitfields handled
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=513 a
+*!v8*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=513 b
+*!*
+*!v8*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=513 c
+<not in a module>
+...
+*!v8*
+
+# We have seen some cases (not yet understood: crbug.com/364146) where v8.dll
+# has no symbols. These are all on the bots using component build, so we use
+# v8.dll. TODO(bruening): remove these once we've fixed the symbol issue.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=513 d
+v8.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=513 e
+<not in a module>
+...
+v8.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=546
+...
+mscms.dll!*
+...
+GDI32.dll!*
+*!IconUtil::Create*HICON*
+
+LEAK
+name=http://crbug.com/92152
+...
+USER32.dll!CreateWindowExW
+*!views::TooltipManagerWin::Init
+*!views::TooltipManagerWin::TooltipManagerWin
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=567 a
+dbghelp.dll!*
+...
+dbghelp.dll!StackWalk64
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=567 b
+*!*
+dbghelp.dll!*
+...
+dbghelp.dll!StackWalk64
+
+# Symbols w/o PDB make no sense, first ntdll frame is TpSetTimer w/o syms and
+# TppWorkerThread w/ syms. We used to use mod+offs here, but that was too
+# brittle, so we switched to RPCRT4.dll!*.
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=569
+RPCRT4.dll!...
+ntdll.dll!*
+ntdll.dll!*
+KERNEL*.dll!BaseThreadInitThunk
+
+# TODO(timurrrr): investigate these
+UNINITIALIZED READ
+name=http://crbug.com/TODO a
+...
+*!win_util::GetLogonSessionOnlyDACL
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO b
+...
+ntshrui.dll!IsPathSharedW
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO c
+...
+*!NetApiBufferFree
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO d
+...
+*!ShellExecute*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO e
+...
+*!SHParseDisplayName
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO f
+...
+*!GetCanonicalPathInfo*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO g
+...
+SHELL32.dll!Ordinal*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO h
+...
+GDI32.dll!GetTextExtentPoint32*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO i
+...
+*!SyncSocketClientListener::OnMsgClassResponse
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO j
+...
+*!*NSPRInitSingleton*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO k
+*!NdrSimpleStructFree
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO l
+ntdll.dll!RtlpNtOpenKey
+ntdll.dll!RtlMakeSelfRelativeSD
+ntdll.dll!RtlAbsoluteToSelfRelativeSD
+ADVAPI32.dll!MakeSelfRelativeSD
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO m
+...
+CRYPT32.dll!I_CertSyncStore
+
+# This matches the same stack as DrMem i#751, but it's an uninit read instead of
+# a leak. Must be some early thread initialization. Doesn't look like
+# bit-level though.
+UNINITIALIZED READ
+name=http://crbug.com/TODO n
+RPCRT4.dll!*
+RPCRT4.dll!*
+RPCRT4.dll!*
+ntdll.dll!*
+ntdll.dll!*
+KERNEL*.dll!BaseThreadInitThunk
+
+# No idea where this is from, but Chrome isn't even on the stack.
+POSSIBLE LEAK
+name=http://crbug.com/TODO o
+RPCRT4.dll!...
+ole32.dll!OleInitialize
+ole32.dll!...
+KERNEL32.dll!BaseThreadInitThunk
+
+# Matches lots of RPC related leaks. So far RPC handles have been mostly owned
+# by system libraries and are not something we can fix easily.
+POSSIBLE LEAK
+name=http://crbug.com/TODO p
+RPCRT4.dll!*
+RPCRT4.dll!*
+RPCRT4.dll!NDRCContextBinding
+
+# No idea, but all system code, not interesting.
+POSSIBLE LEAK
+name=http://crbug.com/TODO q
+RPCRT4.dll!...
+RPCRT4.dll!*
+RPCRT4.dll!*
+ole32.dll!...
+ole32.dll!*
+ole32.dll!*
+...
+SHELL32.dll!*
+
+LEAK
+name=http://crbug.com/109278 video device COM leaks
+...
+*!media::VideoCaptureDevice::*
+
+LEAK
+name=http://crbug.com/109278 audio device COM leaks
+...
+*!media::GetInputDeviceNamesWin
+
+# False pos uninit in shell32 when resolving links.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=745
+SHELL*.dll!*
+...
+SHELL*.dll!*
+*!file_util::ResolveShortcut
+
+# Probable false pos uninit in ffmpeg. Probably due to running off the end of a
+# buffer with SSE/MMX instructions whose results are then masked out later.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=747 a
+*!ff_pred4x4_vertical_vp8_mmxext
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=747 b
+*!ff_pred4x4_down_left_mmxext
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=747 c
+*!ff_vorbis_floor1_render_list
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=747 d
+*!ff_put_vp8_epel8_h6_ssse3
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=747 e
+*!ff_put_vp8_epel8_h4_ssse3
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=747 f
+*!ff_fft_permute_sse
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=747 g
+*!ff_simple_idct_add_mmx
+
+# ffmpeg seems to leak a pthread condition variable.
+LEAK
+name=http://crbug.com/110042
+*!ptw32_new
+*!pthread_self
+*!sem_wait
+*!pthread_cond_wait
+*!ff_thread_decode_frame
+*!avcodec_decode_video2
+
+# Improperly handled ioctl in bcrypt.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=748
+system call NtDeviceIoControlFile InputBuffer
+...
+bcrypt.dll!BCryptUnregisterConfigChangeNotify
+bcrypt.dll!BCryptGetFipsAlgorithmMode
+ntdll.dll!RtlQueryEnvironmentVariable
+
+# Not sure what this is.
+POSSIBLE LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=749
+...
+fwpuclnt.dll!*
+...
+RPCRT4.dll!*
+...
+fwpuclnt.dll!*
+...
+WS2_32.dll!*
+*!talk_base::SafeGetHostByName
+*!talk_base::SocketAddress::GetLocalIPs
+*!talk_base::SocketAddress::IsLocalIP
+*!cricket::Transport::VerifyCandidate
+*!cricket::Session::OnRemoteCandidates
+*!cricket::Session::OnTransportInfoMessage
+*!cricket::Session::OnIncomingMessage
+*!cricket::SessionManager::OnIncomingMessage
+
+# More uninit false pos in rpcrt4.dll not caught by default suppressions.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=529
+RPCRT4.dll!*
+...
+*!base::LaunchProcess
+
+# System leak from CreateEnvironmentBlock.
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=757
+...
+USERENV.dll!CreateEnvironmentBlock
+
+# Looks like another instance of 753
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=753
+...
+ntdll.dll!RtlLoadString
+
+# More bit manip fps
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=493
+USP10.dll!ScriptPositionSingleGlyph
+
+# Various TLS leaks that we don't understand yet. We should be finding a root
+# for these.
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=778 a
+KERNELBASE.dll!TlsSetValue
+
+# Originally filed as: http://crbug.com/109281
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=778 b
+*!operator new
+*!operator new[]
+*!*::ConstructTlsVector
+*!base::ThreadLocalStorage::StaticSlot::Get
+
+# This is an NSS PRThread object installed in TLS. Why isn't this detected as a
+# root? See also http://crbug.com/32624
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=778 c
+*!PR_Calloc
+*!_PR_AttachThread
+*!_PRI_AttachThread
+
+# Bit-level fps in rich edit layer.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=791
+RICHED20.dll!*
+RICHED20.dll!*
+
+# Already suppressed by a drmemory default suppression that we don't have yet.
+LEAK
+name=i#757: RPC binding leaks in sspicli.dll
+RPCRT4.dll!*
+...
+SspiCli.dll!*
+SspiCli.dll!Cre*
+
+# Async NtReadFile false positives. This was fixed in drmemory r772, remove
+# this supp when we pull that rev.
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=798
+system call NtReadFile parameter #5
+KERNEL32.dll!ReadFile
+
+# Probable syscall false positive.
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=809
+system call NtGdiPolyPolyDraw parameter #1
+*!gfx::Path::CreateNativeRegion
+
+# Very wide suppression for all uninits in rpcrt4.dll. We get bad stack traces
+# coming out of this module (sometimes only one frame), which makes it hard to
+# write precise suppressions. Until we have bit-level tracking (DRMi#113) we
+# should keep this.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=113 rpcrt4.dll wildcard
+RPCRT4.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=841 a
+...
+CRYPTNET.dll!I_CryptNetGetConnectivity
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=841 b
+...
+webio.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=841 c
+...
+winhttp.dll!*
+
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=841 d
+...
+CRYPTNET.dll!I_CryptNetGetConnectivity
+
+# Often missing a ntdll.dll!KiUserCallbackDispatcher frame.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=810
+instruction=test %edx %edx
+USER32.dll!GetClassLongW
+...
+*!ui::CenterAndSizeWindow
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=815
+KERNEL*.dll!...
+dxgi.dll!*
+USER32.dll!GetMonitorInfoA
+ntdll.dll!KiUserCallbackDispatcher
+dxgi.dll!*
+WinSATAPI.DLL!*
+
+# Suppress anything in cmd.exe. It's safer to suppress these than to disable
+# following, since someone might launch a Chrome process via cmd.exe.
+LEAK
+name=cmd.exe
+...
+cmd.exe!*
+
+# Possible true system use after free.
+UNADDRESSABLE ACCESS
+name=http://code.google.com/p/drmemory/issues/detail?id=623
+KERNELBASE.dll!TlsGetValue
+OLEAUT32.dll!SysFreeString
+OLEAUT32.dll!SysAllocStringByteLen
+OLEACC.dll!*
+OLEACC.dll!*
+OLEACC.dll!*
+OLEACC.dll!*
+
+# basic_streambuf seems to leak something in creating a std::_Mutex
+LEAK
+name=http://code.google.com/p/drmemory/issues/detail?id=857
+ntdll.dll!...
+ntdll.dll!RtlInitializeCriticalSection
+*!_Mtxinit
+*!std::_Mutex::_Mutex
+*!std::basic_streambuf<>
+
+# Seems to create a DC, sometimes. GetTextMetrics returns no pointers, though.
+LEAK
+name=GDI SetBrushOrgEx leak
+GDI32.dll!...
+GDI32.dll!GetTextMetricsW
+*!gfx::PlatformFontWin::CreateHFontRef
+*!gfx::PlatformFontWin::GetBaseFontRef
+
+###############################################################
+# Benign issues in Chromium
+
+# This test intentionally leaks an object and checks that it's never deleted.
+LEAK
+name=BrowserThreadTest.NotReleasedIfTargetThreadNonExistant leak
+...
+*!BrowserThreadTest_NotReleasedIfTargetThreadNonExistent_Test::TestBody
+
+LEAK
+name=deliberate histogram leak
+...
+*!replace_operator_new
+...
+*!*::*Histogram::FactoryGet
+
+LEAK
+name=deliberate leak for SampleMap
+...
+*!base::SampleMap::Accumulate
+*!base::SparseHistogram::Add
+
+LEAK
+name=deliberate LazyInstance leak
+...
+*!*LeakyLazyInstance*
+...
+*!base::LazyInstance*::Pointer
+
+LEAK
+name=http://crbug.com/79933 (2)
+...
+*!TestURLRequestContext::TestURLRequestContext
+*!TestURLRequestContextGetter::GetURLRequestContext
+*!notifier::SingleLoginAttempt::SingleLoginAttempt
+*!notifier::Login::StartConnection
+*!syncer::InvalidationNotifier::UpdateCredentials
+*!syncer::NonBlockingInvalidationNotifier::Core::UpdateCredentials
+
+LEAK
+name=http://crbug.com/79933 (3)
+...
+*!TestURLRequestContext::TestURLRequestContext
+*!TestURLRequestContextGetter::GetURLRequestContext
+*!URLFetcher::Core::StartURLRequest
+
+LEAK
+name=http://crbug.com/79933 (4)
+*!generic_cpp_alloc
+*!operator new
+*!std::_Allocate<>
+*!std::allocator<>::allocate
+*!std::vector<>::_Insert_n
+*!std::vector<>::insert
+*!std::vector<>::push_back
+*!ObserverListBase<>::AddObserver
+...
+*!net::HttpNetworkSession::HttpNetworkSession
+*!notifier::ProxyResolvingClientSocket::ProxyResolvingClientSocket
+*!notifier::XmppClientSocketFactory::CreateTransportClientSocket
+*!notifier::ChromeAsyncSocket::Connect
+*!buzz::XmppClient::ProcessStartXmppLogin
+*!buzz::XmppClient::Process
+*!talk_base::Task::Step
+*!talk_base::TaskRunner::InternalRunTasks
+*!talk_base::TaskRunner::RunTasks
+*!notifier::TaskPump::CheckAndRunTasks
+*!base::internal::RunnableAdapter<>::Run
+
+# Test intentionally leaks an object.
+LEAK
+name=http://crbug.com/86301
+*!replace_operator_new
+...
+*!*_DeadReplyLoopDoesNotDelete_Test::TestBody
+
+# Leak in a binary copy of Firefox 3's NSS dll. Not much we can do about it.
+LEAK
+name=Firefox 3 NSS dll leak
+nspr4.dll!*
+...
+*!NSSDecryptor::~NSSDecryptor
+
+# We get uninit reports inside GMock when it prints the bytes of references to
+# partially initialized objects passed to unexpected method calls.
+UNINITIALIZED READ
+name=http://crbug.com/64887 (GMock printing uninit data)
+...
+*!testing::*::PrintBytesInObjectTo*
+
+###############################################################
+# Proactively borrowed from memcheck/suppressions.txt.
+# We have not yet seen these, but we are expanding the sets of tests
+# we're running, and we've hit redness in the past that could have
+# been avoided by already having the Memcheck suppressions.
+# TODO(bruening): review the entire file (not just these) once we're
+# at the peak set of tests we plan to run and remove the unused ones.
+
+UNINITIALIZED READ
+name=bug_99307
+*!modp_b64_encode
+*!base::Base64Encode*
+*!web_ui_util::GetImageDataUrl
+*!::NetworkInfoDictionary::set_icon
+
+UNINITIALIZED READ
+name=bug_101781
+*!encode_one_block
+*!encode_mcu_huff
+*!compress_data
+*!process_data_simple_main
+*!chromium_jpeg_write_scanlines
+*!gfx::JPEGCodec::Encode
+*!gfx::JPEGEncodedDataFromImage
+*!history::TopSites::EncodeBitmap
+*!history::TopSites::SetPageThumbnail
+*!history::ExpireHistoryTest::AddExampleData
+*!history::ExpireHistoryTest::*
+
+UNINITIALIZED READ
+name=bug_101781_d
+*!testing::AssertionResult testing::internal::CmpHelperGE<>
+*!gfx::JPEGCodec_EncodeDecodeRGBA_Test::TestBody
+
+UNINITIALIZED READ
+name=bug_105907
+...
+*!skia::BGRAConvolve2D
+*!skia::ImageOperations::ResizeBasic*
+*!skia::ImageOperations::Resize*
+
+UNINITIALIZED READ
+name=bug_112278
+*!fetch_texel_2d_f_rgba8888
+*!sample_2d_linear
+*!sample_linear_2d
+*!fetch_texel_lod
+*!fetch_texel
+*!_mesa_execute_program
+*!run_program
+*!_swrast_exec_fragment_program
+*!shade_texture_span
+*!_swrast_write_rgba_span
+*!general_triangle
+*!_swrast_validate_triangle
+*!_swrast_Triangle
+*!triangle_rgba
+*!_tnl_render_triangles_elts
+*!run_render
+*!_tnl_run_pipeline
+*!_tnl_draw_prims
+*!_tnl_vbo_draw_prims
+*!vbo_validated_drawrangeelements
+*!vbo_exec_DrawElements
+*!neutral_DrawElements
+
+UNINITIALIZED READ
+name=bug_112278b
+*!fetch_texel_2d_f_rgba8888
+*!sample_2d_nearest
+*!sample_nearest_2d
+*!fetch_texel_lod
+*!fetch_texel
+*!_mesa_execute_program
+*!run_program
+*!_swrast_exec_fragment_program
+*!shade_texture_span
+*!_swrast_write_rgba_span
+*!general_triangle
+...
+*!_swrast_Triangle
+*!triangle_rgba
+...
+*!run_render
+*!_tnl_run_pipeline
+*!_tnl_draw_prims
+*!_tnl_vbo_draw_prims
+
+UNINITIALIZED READ
+name=bug_115419_1
+*!fetch_texel_2d_f_rgba8888
+*!texture_get_row
+*!fast_read_rgba_pixels
+*!read_rgba_pixels
+*!_swrast_ReadPixels
+*!_mesa_ReadPixels
+*!glReadPixels
+*!gpu::gles2::GLES2DecoderImpl::HandleReadPixels
+*!gpu::gles2::GLES2DecoderImpl::DoCommand
+*!gpu::CommandParser::ProcessCommand
+*!gpu::GpuScheduler::PutChanged
+*!webkit::gpu::GLInProcessContext::PumpCommands
+
+UNINITIALIZED READ
+name=bug_115419_2
+*!get_src_arg_mask
+*!_mesa_remove_extra_move_use
+*!_mesa_optimize_program
+*!get_mesa_program
+*!_mesa_ir_link_shader
+*!_mesa_glsl_link_shader
+*!link_program
+*!_mesa_LinkProgramARB
+*!glLinkProgram
+...
+*!gpu::gles2::GLES2DecoderImpl::DoLinkProgram*
+*!gpu::gles2::GLES2DecoderImpl::HandleLinkProgram*
+*!gpu::gles2::GLES2DecoderImpl::DoCommand
+*!gpu::CommandParser::ProcessCommand
+*!gpu::GpuScheduler::PutChanged
+*!webkit::gpu::GLInProcessContext::PumpCommands
+
+UNINITIALIZED READ
+name=bug_138058
+...
+*!blink::WebVTTParser::constructTreeFromToken
+*!blink::WebVTTParser::createDocumentFragmentFromCueText
+*!blink::TextTrackCue::getCueAsHTML
+*!blink::TextTrackCue::updateDisplayTree
+*!blink::HTMLMediaElement::updateActiveTextTrackCues
+
+UNINITIALIZED READ
+name=bug_138220_a
+*!blink::HTMLInputElement::dataList
+*!blink::HTMLInputElement::list
+*!blink::RenderSliderContainer::layout
+*!blink::RenderBlock::layoutBlockChild
+*!blink::RenderBlock::layoutBlockChildren
+*!blink::RenderBlock::layoutBlock
+*!blink::RenderBlock::layout
+*!blink::RenderSlider::layout
+
+UNINITIALIZED READ
+name=bug_138220_b
+*!blink::HTMLInputElement::dataList
+*!blink::HTMLInputElement::list
+*!blink::RenderTheme::paintSliderTicks
+*!blink::RenderThemeChromiumLinux::paintSliderTrack
+*!blink::RenderTheme::paint
+*!blink::RenderBox::paintBoxDecorations
+*!blink::RenderBlock::paintObject
+
+UNINITIALIZED READ
+name=bug_162825
+*!bcmp
+*!gpu::gles2::ShaderTranslatorCache::ShaderTranslatorInitParams::operator<
+*!std::less<>::operator
+...
+*!std::map<>::find
+...
+*!gpu::gles2::GLES2DecoderImpl::InitializeShaderTranslator
+
+UNINITIALIZED READ
+name=bug_176616_a
+*!WebTestRunner::WebTestProxyBase::didCreateDataSource
+*!WebTestRunner::WebTestProxy<>::didCreateDataSource
+*!blink::FrameLoaderClientImpl::createDocumentLoader
+*!blink::FrameLoader::init
+*!blink::Frame::init
+*!blink::WebFrameImpl::initializeAsMainFrame
+*!blink::WebViewImpl::initializeMainFrame
+*!TestShell::createNewWindow
+*!TestShell::createMainWindow
+*!TestShell::initialize
+
+UNINITIALIZED READ
+name=bug_176616_b
+*!WebTestRunner::TestRunner::reset
+*!WebTestRunner::TestInterfaces::resetAll
+*!WebTestRunner::WebTestInterfaces::resetAll
+*!TestShell::resetTestController
+*!runTest
+
+UNINITIALIZED READ
+name=bug_222883
+*!v8::internal::ScavengeVisitor::ScavengePointer
+*!v8::internal::ScavengeVisitor::VisitPointers
+*!v8::internal::StandardFrame::IterateExpressions
+...
+*!v8::internal::Heap::Scavenge
+*!v8::internal::Heap::PerformGarbageCollection*
+
+UNINITIALIZED READ
+name=bug_238170a
+*!blink::ElementRuleCollector::collectMatchingRules
+*!blink::ElementRuleCollector::hasAnyMatchingRules
+
+UNINITIALIZED READ
+name=bug_238170b
+*!blink::ElementRuleCollector::collectMatchingRules
+*!blink::StyleResolver::matchAuthorRules
+
+UNINITIALIZED READ
+name=bug_238170c
+*!blink::ReplaceSelectionCommand::doApply
+*!blink::CompositeEditCommand::apply
+*!blink::applyCommand
+
+UNINITIALIZED READ
+name=bug_259789
+*!blink::::adjustAttributes
+*!blink::WebGLRenderingContext::maybeRestoreContext
+*!blink::Timer<>::fired
+*!blink::ThreadTimers::sharedTimerFiredInternal
+*!blink::ThreadTimers::sharedTimerFired
+*!content::BlinkPlatformImpl::DoTimeout
+
+UNINITIALIZED READ
+name=bug_290405
+*!GrGradientEffect::onIsEqual
+*!GrEffect::isEqual
+*!GrEffectStage::DeferredStage::isEqual
+*!GrDrawState::DeferredState::isEqual
+*!GrInOrderDrawBuffer::needsNewState
+*!GrInOrderDrawBuffer::onDraw
+*!GrDrawTarget::drawIndexedInstances
+*!GrTextContext::flushGlyphs
+
+UNINITIALIZED READ
+name=bug_290435
+*!blink::AudioContext::scheduleNodeDeletion
+*!blink::AudioContext::handlePostRenderTasks
+*!blink::AudioDestinationNode::render
+*!blink::OfflineAudioDestinationNode::offlineRender
+
+UNINITIALIZED READ
+name=bug_364724
+*!base::MD5DigestToBase16
+*!content::WebKitTestRunner::CaptureDumpPixels
+
+UNINITIALIZED READ
+name=bug_298143
+...
+*!blink::TypeConversionsV8Internal*AttributeGetter*
+
+UNINITIALIZED READ
+name=bug_299804
+*!GrConfigConversionEffect::TestForPreservingPMConversions
+*!::test_pm_conversions
+*!GrContext::createPMToUPMEffect
+*!GrContext::readRenderTargetPixels
+*!SkGpuDevice::onReadPixels
+*!SkBaseDevice::readPixels
+*!SkCanvas::readPixels
+*!DeferredDevice::onReadPixels
+*!SkBaseDevice::readPixels
+*!SkCanvas::readPixels
+*!blink::GraphicsContext::readPixels
+*!WTF::PassRefPtr<>
+*!blink::ImageBuffer::getUnmultipliedImageData
+*!blink::CanvasRenderingContext2D::getImageData
+*!blink::CanvasRenderingContext2D::getImageData
+*!blink::CanvasRenderingContext2DV8Internal::getImageDataMethod
+*!blink::CanvasRenderingContext2DV8Internal::getImageDataMethodCallback
+*!v8::internal::FunctionCallbackArguments::Call
+*!v8::internal::HandleApiCallHelper<>
+
+UNINITIALIZED READ
+name=bug_309477
+*!WebTestRunner::EventSender::reset
+*!WebTestRunner::TestInterfaces::resetTestHelperControllers
+*!WebTestRunner::TestInterfaces::resetAll
+...
+*!content::ShellRenderProcessObserver::WebKitInitialized
+*!content::RenderThreadImpl::EnsureWebKitInitialized
+*!content::RenderThreadImpl::OnCreateNewView
+
+###############################################################
+# Real issues in Chromium
+
+LEAK
+name=http://crbug.com/32085
+...
+chrome.dll!NotificationRegistrar::Add
+
+UNINITIALIZED READ
+name=http://crbug.com/57266 (1)
+...
+*!remoting::EncoderVp8::Encode
+
+UNINITIALIZED READ
+name=http://crbug.com/57266 (2)
+...
+*!vp8_*
+
+LEAK
+name=http://crbug.com/70062
+*!PR_Calloc
+*!PR_NewLock
+...
+*!InitSessionCacheLocks
+*!initSessionCacheLocksLazily
+*!PR_CallOnce
+*!ssl_InitSessionCacheLocks
+*!lock_cache
+*!ssl_LookupSID
+*!ssl2_BeginClientHandshake
+*!ssl_Do1stHandshake
+*!SSL_ForceHandshake
+*!net::SSL*SocketNSS::DoHandshake
+*!net::SSL*SocketNSS::DoHandshakeLoop
+
+LEAK
+name=http://crbug.com/74417 a
+*!replace_operator_new
+*!disk_cache::BackendImpl::CreateEntryImpl
+
+LEAK
+name=http://crbug.com/74417 b
+*!replace_operator_new
+*!disk_cache::BackendImpl::NewEntry
+
+# One more disk_cache::BackendImpl leak. See also http://crbug.com/87500.
+LEAK
+name=http://crbug.com/74417 c
+*!replace_operator_new
+...
+*!disk_cache::EntryImpl::UserBuffer::UserBuffer
+
+LEAK
+name=http://crbug.com/75247
+...
+*!replace_operator_new
+*!AutofillDownloadTestHelper::AutofillDownloadTestHelper
+
+LEAK
+name=http://crbug.com/78784
+*!generic_cpp_alloc
+*!operator new
+*!TestingProfile::CreateRequestContext
+*!*ProfileSyncService*::SetUp
+
+LEAK
+name=http://crbug.com/80550 (1)
+...
+*!RenderWidgetHost::WasHidden
+
+LEAK
+name=http://crbug.com/80550 (2)
+...
+*!RenderWidgetHost::WasRestored
+
+LEAK
+name=http://crbug.com/87612
+...
+*!SSL_ConfigSecureServer
+*!net::SSLServerSocketNSS::InitializeSSLOptions
+*!net::SSLServerSocketNSS::Handshake
+
+LEAK
+name=http://crbug.com/88640
+*!generic_cpp_alloc
+*!operator new
+*!ProfileImpl::InitRegisteredProtocolHandlers
+*!ProfileImpl::DoFinalInit
+*!ProfileImpl::OnPrefsLoaded
+
+LEAK
+name=http://crbug.com/91465
+*!generic_cpp_alloc
+*!operator new
+*!browser_sync::internal::WeakHandleCore<>::*
+*!browser_sync::WeakHandle<>::*
+*!syncer::SyncManager::SyncInternal::Init
+
+LEAK
+name=http://crbug.com/91491
+...
+*!CrxUpdateService::ProcessPendingItems
+
+UNINITIALIZED READ
+name=http://crbug.com/92026 (1)
+softokn3.dll!FC_GetFunctionList
+...
+softokn3.dll!NSC_ModuleDBFunc
+
+UNINITIALIZED READ
+name=http://crbug.com/92026 (2)
+freebl3.dll!FREEBL_GetVector
+...
+softokn3.dll!NSC_ModuleDBFunc
+
+# Possible real Chromium issue in DoCrossfade.
+UNINITIALIZED READ
+name=http://crbug.com/110049
+*!media::DoCrossfade<>
+*!media::Crossfade
+*!media::AudioRendererAlgorithmBase::FillBuffer
+
+# Known sqlite3 leaks.
+LEAK
+name=http://crbug.com/113847 (1)
+...
+*!sqlite3MemMalloc
+*!mallocWithAlarm
+*!sqlite3Malloc
+...
+*!yy_reduce
+
+LEAK
+name=http://crbug.com/113847 (2)
+...
+*!openDatabase
+*!sqlite3_open
+
+LEAK
+name=http://crbug.com/115328
+...
+*!GenericInfoViewTest_GenericInfoView_Test::TestBody
+
+LEAK
+name=http://crbug.com/117427 a
+...
+*!net::HostCache::Set
+*!net::HostResolverImpl::CacheResult
+*!net::HostResolverImpl::Job::CompleteRequests
+*!net::HostResolverImpl::Job::OnProcTaskComplete
+*!base::internal::RunnableAdapter<>::Run
+
+# Probably related to 117427. Someone is not tearing down DNS resolution during
+# testing.
+LEAK
+name=http://crbug.com/117427 b
+*!generic_cpp_alloc
+*!operator new
+*!base::internal::WeakReferenceOwner::GetRef
+*!base::SupportsWeakPtr<>::AsWeakPtr
+*!net::HostResolverImpl::Job::Job
+*!net::HostResolverImpl::Resolve
+*!net::SingleRequestHostResolver::Resolve
+*!net::TransportConnectJob::DoResolveHost
+*!net::TransportConnectJob::DoLoop
+*!net::TransportConnectJob::ConnectInternal
+*!net::ConnectJob::Connect
+*!net::internal::ClientSocketPoolBaseHelper::RequestSocketInternal
+*!net::internal::ClientSocketPoolBaseHelper::RequestSocket
+*!net::ClientSocketPoolBase<>::RequestSocket
+*!net::TransportClientSocketPool::RequestSocket
+*!net::ClientSocketHandle::Init<>
+*!net::`anonymous namespace'::InitSocketPoolHelper
+*!net::InitSocketHandleForRawConnect
+*!notifier::ProxyResolvingClientSocket::ProcessProxyResolveDone
+*!base::internal::RunnableAdapter<>::Run
+
+# IE frame possible leak of COM object.
+LEAK
+name=http://crbug.com/122399
+ole32.dll!...
+ole32.dll!CoTaskMemAlloc
+urlmon.dll!...
+urlmon.dll!CreateUri
+IEFRAME.dll!*
+
+# RenderWidgetHelper leak in DelayProfileDestruction test.
+LEAK
+name=http://crbug.com/125565
+*!generic_cpp_alloc
+*!operator new
+*!RenderProcessHostImpl::RenderProcessHostImpl
+*!SiteInstanceImpl::GetProcess
+*!BrowserTestOffTheRecord_DelayProfileDestruction_Test::TestBody
+
+LEAK
+name=http://crbug.com/125807
+*!generic_cpp_alloc
+*!operator new
+*!TransportSecurityPersister::TransportSecurityPersister
+*!TransportSecurityPersisterTest::TransportSecurityPersisterTest
+
+UNINITIALIZED READ
+name=bug_113076
+*!media::ConvertYUVToRGB32_C
+*!media::LinearScaleYUVToRGB32RowWithRange_C
+*!media::ScaleYUVToRGB32WithRect
+
+UNINITIALIZED READ
+name=bug_343248
+osmesa.dll!LINTERP
+osmesa.dll!INTERP_4F
+osmesa.dll!_tnl_generic_interp
+
+UNINITIALIZED READ
+name=bug_340752
+...
+*!*::ThreadState::visitStack
+
+UNINITIALIZED READ
+name=bug_343663
+blink_web.dll!blink::RenderBlock::computeInlinePreferredLogicalWidths
+
+UNINITIALIZED READ
+name=bug_343797
+...
+blink_web.dll!blink::MediaQueryExp::create
+blink_web.dll!blink::BisonCSSParser::createFloatingMediaQueryExp
+blink_web.dll!cssyyparse
+
+UNINITIALIZED READ
+name=bug_343915
+blink_web.dll!blink::BisonCSSParser::parseFlex
+blink_web.dll!blink::BisonCSSParser::parseValue
+blink_web.dll!cssyyparse
+
+UNADDRESSABLE ACCESS
+name=BUG_343958
+blink_web.dll!blink::Node::getFlag
+blink_web.dll!blink::Node::inDocument
+blink_web.dll!blink::Node::isDescendantOf
+blink_web.dll!blink::CompositeEditCommand::cloneParagraphUnderNewElement
+blink_web.dll!blink::CompositeEditCommand::moveParagraphWithClones
+
+UNINITIALIZED READ
+name=BUG_344076
+blink_web.dll!blink::Editor::canSmartReplaceWithPasteboard
+blink_web.dll!blink::Editor::pasteAsPlainTextWithPasteboard
+blink_web.dll!blink::Editor::pasteAsPlainText
+blink_web.dll!blink::executePasteAndMatchStyle
+blink_web.dll!blink::Editor::Command::execute
+blink_web.dll!blink::Document::execCommand
+
+UNINITIALIZED READ
+name=BUG_349128
+content.dll!std::*
+...
+content.dll!content::BrowserAccessibilityManagerWin::*
+...
+*!*::UpdateNode
+
+# There are so many osmesa errors we have to suppress (mostly the unpack_RGB*
+# variety) that it's a performance hit. We avoid that by requesting
+# whole-module suppression
+# (see https://code.google.com/p/drmemory/issues/detail?id=1529).
+UNINITIALIZED READ
+name=bug_347967_all_osmesa
+osmesa.dll!*
+
+UNINITIALIZED READ
+name=bug_347967
+osmesa.dll!unpack_RGB*888
+osmesa.dll!_mesa_unpack_rgba_row
+osmesa.dll!slow_read_rgba_pixels
+osmesa.dll!read_rgba_pixels
+osmesa.dll!_mesa_readpixels
+...
+gpu.dll!gpu::gles2::GLES2DecoderImpl::DoCommand
+gpu.dll!gpu::CommandParser::ProcessCommand
+gpu.dll!gpu::GpuScheduler::PutChanged
+
+UNINITIALIZED READ
+name=bug_347967,bug_348357
+osmesa.dll!clip_span
+osmesa.dll!_swrast_write_rgba_span
+osmesa.dll!general_triangle
+...
+gpu.dll!gpu::gles2::GLES2DecoderImpl::DoDrawElements
+gpu.dll!gpu::gles2::GLES2DecoderImpl::HandleDrawElements
+gpu.dll!gpu::gles2::GLES2DecoderImpl::DoCommand
+gpu.dll!gpu::CommandParser::ProcessCommand
+
+UNINITIALIZED READ
+name=bug_361594
+...
+skia.dll!SkA8_Shader_Blitter::blitH
+skia.dll!SkBlitter::blitRect
+skia.dll!blitrect
+skia.dll!SkScan::FillIRect
+...
+skia.dll!SkDraw::drawRect
+skia.dll!SkDraw::drawBitmap
+skia.dll!SkBitmapDevice::drawBitmap
+skia.dll!SkCanvas::internalDrawBitmap
+skia.dll!SkCanvas::drawBitmap
+content.dll!content::ScreenshotData::EncodeOnWorker
+
+UNINITIALIZED READ
+name=bug_363487
+blink_web.dll!blink::RenderLayerCompositor::updateIfNeeded
+blink_web.dll!blink::RenderLayerCompositor::updateIfNeededRecursive
+blink_web.dll!blink::FrameView::updateLayoutAndStyleForPainting
+blink_web.dll!blink::PageAnimator::updateLayoutAndStyleForPainting
+
+UNINITIALIZED READ
+name=bug_365101
+*!device::BluetoothAdapterWin::AdapterStateChanged
+
+UNINITIALIZED READ
+name=bug_364146
+...
+v8.dll!*
+net_with_v8.dll!net::ProxyResolverV8::Context::*
+
+UNINITIALIZED READ
+name=bug_334448
+*!CLD2::UTF8GenericReplaceInternal
+*!CLD2::UTF8GenericReplace
+*!CLD2::ScriptScanner::LowerScriptSpan
+*!CLD2::ScriptScanner::GetOneScriptSpanLower
+*!CLD2::DetectLanguageSummaryV2
+*!CLD2::DetectLanguageSummary
+
+UNINITIALIZED READ
+name=bug_42043
+...
+QuickTime.qts!*
+
+UNINITIALIZED READ
+name=bug_369141
+...
+*!blink::RenderLayerClipper::updateClipRects
+*!blink::RenderLayerClipper::parentClipRects
+*!blink::RenderLayerClipper::backgroundClipRect
+
+HANDLE LEAK
+name=bug_370178
+system call NtCreateEvent
+KERNELBASE.dll!CreateEventExW
+KERNELBASE.dll!CreateEventW
+base.dll!base::WaitableEvent::WaitableEvent
+gpu.dll!gpu::InProcessCommandBuffer::Initialize
+gl_in_process_context.dll!gpu::`anonymous namespace'::GLInProcessContextImpl::Initialize
+gl_in_process_context.dll!gpu::GLInProcessContext::CreateContext
+
+UNINITIALIZED READ
+name=bug_371844
+*!content::GamepadProvider::PadState::Match
+*!content::GamepadProvider::DoPoll
+
+UNINITIALIZED READ
+name=bug_371950
+media.dll!ConvertYUVToRGB32Row_SSE
+media.dll!media::ConvertYUVToRGB32_SSE
+media.dll!media::ConvertVideoFrameToBitmap
+media.dll!media::SkCanvasVideoRenderer::Paint
+cc.dll!cc::VideoResourceUpdater::CreateForSoftwarePlanes
+cc.dll!cc::VideoResourceUpdater::CreateExternalResourcesFromVideoFrame
+cc.dll!cc::VideoLayerImpl::WillDraw
+
+UNINITIALIZED READ
+name=bug_371959
+content.dll!webrtc::PeerConnection::DoInitialize
+content.dll!webrtc::PeerConnection::Initialize
+content.dll!webrtc::PeerConnectionFactory::CreatePeerConnection_s
+content.dll!webrtc::PeerConnectionFactory::OnMessage
+content.dll!jingle_glue::JingleThreadWrapper::Send
+content.dll!webrtc::PeerConnectionFactory::CreatePeerConnection
+content.dll!webrtc::PeerConnectionFactoryInterface::CreatePeerConnection
+
+# This suppression is deliberately general, as bugs reported in
+# ProcessOutgoingMessages are difficult to track down until we
+# get our annotations in place.
+# TODO(bruening): add annotations once we have the infrastructure.
+UNINITIALIZED READ
+name=bug_371991
+system call NtWriteFile parameter #5
+KERNELBASE.dll!WriteFile
+KERNEL32.dll!WriteFile*
+*!IPC::Channel*::ProcessOutgoingMessages
+
+UNINITIALIZED READ
+name=bug_372254
+*!content::*::NotifyPluginProcessHostHelper
+
+UNINITIALIZED READ
+name=drm_i#1546
+*!testing::internal::CmpHelperEQ<>
+...
+*!content::BrowserPluginTest_ResizeFlowControl_Test::TestBody
+
+UNINITIALIZED READ
+name=bug_374105a
+...
+gpu.dll!gpu::gles2::ShaderTranslatorCache::GetTranslator
+gpu.dll!gpu::gles2::GLES2DecoderImpl::InitializeShaderTranslator
+gpu.dll!gpu::gles2::GLES2DecoderImpl::Initialize
+
+UNINITIALIZED READ
+name=bug_374105b
+...
+gpu.dll!gpu::gles2::ShaderTranslator::GetStringForOptionsThatWouldAffectCompilation
+*!gpu::gles2::ShaderTranslatorTest_OptionsString_Test::TestBody
+
+UNINITIALIZED READ
+name=bug_374410
+*!ui::NativeThemeWin::PaintScrollbarTrack
+*!ui::NativeThemeWin::PaintDirect
+
+UNINITIALIZED READ
+name=bug_377728
+...
+*!Hunspell::suggest
+*!HunspellEngine::FillSuggestionList
+*!SpellcheckLanguage::SpellCheckWord
+*!SpellCheck::SpellCheckWord
+
+UNINITIALIZED READ
+name=bug_387373
+*!blink::WebEmbeddedWorkerImpl::startWorkerContext
+*!content::EmbeddedWorkerDispatcher::OnStartWorker
+*!EmbeddedWorkerMsg_StartWorker::Dispatch<>
+
+UNADDRESSABLE ACCESS
+name=bug_389132
+content.dll!crypto_kernel_do_load_cipher_type
+content.dll!crypto_kernel_load_cipher_type
+content.dll!crypto_kernel_init
+content.dll!cricket::SrtpSession::SetKey
+content.dll!cricket::SrtpSession::SetSend
+content.dll!cricket::SrtpFilter::SetRtpParams
+content.dll!cricket::BaseChannel::SetupDtlsSrtp
+content.dll!cricket::BaseChannel::ChannelWritable_w
+content.dll!cricket::BaseChannel::OnWritableState
+
+UNINITIALIZED READ
+name=bug_392585
+system call NtCreateFile parameter #9
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+UNADDRESSABLE ACCESS
+name=bug_398850
+...
+wtf.dll!WTF::PlatformCondition::timedWait
+
+UNINITIALIZED READ
+name=bug_399293
+blink_web.dll!blink::InputMethodController::extendSelectionAndDelete
+blink_web.dll!blink::WebLocalFrameImpl::extendSelectionAndDelete
+content.dll!content::RenderFrameImpl::OnExtendSelectionAndDelete
+*!content::RenderViewImplTest_OnExtendSelectionAndDelete_Test::TestBody
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+UNINITIALIZED READ
+name=bug_398547
+system call NtWriteFile parameter #5
+KERNELBASE.dll!WriteFile
+KERNEL32.dll!WriteFile
+mojo_system_impl.dll!mojo::system::`anonymous namespace'::RawChannelWin::WriteNoLock
+
+HANDLE LEAK
+name=bug_403544
+system call NtCreateSemaphore
+KERNELBASE.dll!CreateSemaphoreExW
+...
+v8.dll!v8::internal::SweeperThread::SweeperThread
+...
+blink_web.dll!blink::WorkerScriptController::WorkerScriptController
+blink_web.dll!blink::WorkerGlobalScope::WorkerGlobalScope
+blink_web.dll!blink::ServiceWorkerGlobalScope::ServiceWorkerGlobalScope
+
+
+HANDLE LEAK
+name=bug_403681
+system call NtCreateTimer
+KERNELBASE.dll!CreateWaitableTimerExW
+KERNEL32.dll!CreateWaitableTimerW
+content.dll!rtc::Timing::Timing
+content.dll!content::RenderProcessHostImpl::CreateMessageFilters
+...
+content.dll!content::RenderViewHostImpl::CreateRenderView
+content.dll!content::WebContentsImpl::CreateRenderViewForRenderManager
+...
+content.dll!content::NavigationControllerImpl::LoadEntry
+...
+*!extensions::ExtensionHost::LoadInitialURL
+
+UNINITIALIZED READ
+name=bug_414268
+pdf.dll!chrome_pdf::PDFiumEngine::OnMouseMove
+pdf.dll!chrome_pdf::PDFiumEngine::HandleEvent
+pdf.dll!chrome_pdf::OutOfProcessInstance::HandleInputEvent
+pdf.dll!pp::InputEvent_HandleEvent
+ppapi_proxy.dll!ppapi::CallWhileUnlocked<>
+ppapi_proxy.dll!ppapi::proxy::PPP_InputEvent_Proxy::OnMsgHandleFilteredInputEvent
+
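For reference, the Dr. Memory suppressions above all share the same plain-text shape: an error-type line (UNINITIALIZED READ, LEAK, HANDLE LEAK, UNADDRESSABLE ACCESS, ...), a name= line, and one or more call-stack frame patterns, where `*` matches within a single frame and a bare `...` matches any number of frames. The following minimal Python sketch, which splits such entries into (type, name, frames) tuples, is purely illustrative; it is not the parser used by Dr. Memory or by any script in this patch.

#!/usr/bin/env python
# Illustrative parser for the suppression format shown above (editorial
# example only; Dr. Memory's real parser is not part of this patch).

def parse_suppressions(text):
  """Yields (error_type, name, frames) tuples from suppression text."""
  block = []
  for line in text.splitlines() + ['']:
    line = line.strip()
    if line.startswith('#'):
      continue                      # comment lines are ignored
    if line:
      block.append(line)
      continue
    if block:                       # a blank line ends the current entry
      error_type = block[0]
      name = None
      frames = []
      for item in block[1:]:
        if item.startswith('name='):
          name = item[len('name='):]
        else:
          frames.append(item)       # includes '...' and 'system call' lines
      yield (error_type, name, frames)
      block = []

if __name__ == '__main__':
  sample = ("UNINITIALIZED READ\n"
            "name=bug_364724\n"
            "*!base::MD5DigestToBase16\n"
            "*!content::WebKitTestRunner::CaptureDumpPixels\n")
  for entry in parse_suppressions(sample):
    print(entry)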
diff --git a/tools/valgrind/drmemory_analyze.py b/tools/valgrind/drmemory_analyze.py
new file mode 100755
index 0000000..915c601
--- /dev/null
+++ b/tools/valgrind/drmemory_analyze.py
@@ -0,0 +1,197 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# drmemory_analyze.py
+
+''' Given a Dr. Memory output file, parses errors and uniques them.'''
+
+from collections import defaultdict
+import common
+import hashlib
+import logging
+import optparse
+import os
+import re
+import subprocess
+import sys
+import time
+
+class DrMemoryError:
+ def __init__(self, report, suppression, testcase):
+ self._report = report
+ self._testcase = testcase
+
+ # Chromium-specific transformations of the suppressions:
+ # Replace 'any_test.exe' and 'chrome.dll' with '*', then remove the
+ # Dr.Memory-generated error ids from the name= lines as they don't
+ # make sense in a multiprocess report.
+ supp_lines = suppression.split("\n")
+ for l in xrange(len(supp_lines)):
+ if supp_lines[l].startswith("name="):
+ supp_lines[l] = "name=<insert_a_suppression_name_here>"
+ if supp_lines[l].startswith("chrome.dll!"):
+ supp_lines[l] = supp_lines[l].replace("chrome.dll!", "*!")
+ bang_index = supp_lines[l].find("!")
+ d_exe_index = supp_lines[l].find(".exe!")
+ if bang_index >= 4 and d_exe_index + 4 == bang_index:
+ supp_lines[l] = "*" + supp_lines[l][bang_index:]
+ self._suppression = "\n".join(supp_lines)
+
+ def __str__(self):
+ output = self._report + "\n"
+ if self._testcase:
+ output += "The report came from the `%s` test.\n" % self._testcase
+ output += "Suppression (error hash=#%016X#):\n" % self.ErrorHash()
+ output += (" For more info on using suppressions see "
+ "http://dev.chromium.org/developers/how-tos/using-drmemory#TOC-Suppressing-error-reports-from-the-\n")
+ output += "{\n%s\n}\n" % self._suppression
+ return output
+
+ # This is a device-independent hash identifying the suppression.
+ # By printing out this hash we can find duplicate reports between tests and
+  # different shards running on multiple buildbots.
+ def ErrorHash(self):
+ return int(hashlib.md5(self._suppression).hexdigest()[:16], 16)
+
+ def __hash__(self):
+ return hash(self._suppression)
+
+ def __eq__(self, rhs):
+ return self._suppression == rhs
+
+
+class DrMemoryAnalyzer:
+ ''' Given a set of Dr.Memory output files, parse all the errors out of
+ them, unique them and output the results.'''
+
+ def __init__(self):
+ self.known_errors = set()
+ self.error_count = 0;
+
+ def ReadLine(self):
+ self.line_ = self.cur_fd_.readline()
+
+ def ReadSection(self):
+ result = [self.line_]
+ self.ReadLine()
+ while len(self.line_.strip()) > 0:
+ result.append(self.line_)
+ self.ReadLine()
+ return result
+
+ def ParseReportFile(self, filename, testcase):
+ ret = []
+
+    # First, read the generated suppressions file so we can easily look up a
+    # suppression for a given error.
+ supp_fd = open(filename.replace("results", "suppress"), 'r')
+ generated_suppressions = {} # Key -> Error #, Value -> Suppression text.
+ for line in supp_fd:
+ # NOTE: this regexp looks fragile. Might break if the generated
+ # suppression format slightly changes.
+ m = re.search("# Suppression for Error #([0-9]+)", line.strip())
+ if not m:
+ continue
+ error_id = int(m.groups()[0])
+ assert error_id not in generated_suppressions
+ # OK, now read the next suppression:
+ cur_supp = ""
+ for supp_line in supp_fd:
+ if supp_line.startswith("#") or supp_line.strip() == "":
+ break
+ cur_supp += supp_line
+ generated_suppressions[error_id] = cur_supp.strip()
+ supp_fd.close()
+
+ self.cur_fd_ = open(filename, 'r')
+ while True:
+ self.ReadLine()
+ if (self.line_ == ''): break
+
+ match = re.search("^Error #([0-9]+): (.*)", self.line_)
+ if match:
+ error_id = int(match.groups()[0])
+ self.line_ = match.groups()[1].strip() + "\n"
+ report = "".join(self.ReadSection()).strip()
+ suppression = generated_suppressions[error_id]
+ ret.append(DrMemoryError(report, suppression, testcase))
+
+ if re.search("SUPPRESSIONS USED:", self.line_):
+ self.ReadLine()
+ while self.line_.strip() != "":
+ line = self.line_.strip()
+ (count, name) = re.match(" *([0-9\?]+)x(?: \(.*?\))?: (.*)",
+ line).groups()
+ if (count == "?"):
+            # Whole-module suppressions have no count available: assume 1
+ count = 1
+ else:
+ count = int(count)
+ self.used_suppressions[name] += count
+ self.ReadLine()
+
+ if self.line_.startswith("ASSERT FAILURE"):
+ ret.append(self.line_.strip())
+
+ self.cur_fd_.close()
+ return ret
+
+ def Report(self, filenames, testcase, check_sanity):
+ sys.stdout.flush()
+ # TODO(timurrrr): support positive tests / check_sanity==True
+ self.used_suppressions = defaultdict(int)
+
+ to_report = []
+ reports_for_this_test = set()
+ for f in filenames:
+ cur_reports = self.ParseReportFile(f, testcase)
+
+ # Filter out the reports that were there in previous tests.
+ for r in cur_reports:
+ if r in reports_for_this_test:
+ # A similar report is about to be printed for this test.
+ pass
+ elif r in self.known_errors:
+ # A similar report has already been printed in one of the prev tests.
+ to_report.append("This error was already printed in some "
+ "other test, see 'hash=#%016X#'" % r.ErrorHash())
+ reports_for_this_test.add(r)
+ else:
+ self.known_errors.add(r)
+ reports_for_this_test.add(r)
+ to_report.append(r)
+
+ common.PrintUsedSuppressionsList(self.used_suppressions)
+
+ if not to_report:
+ logging.info("PASS: No error reports found")
+ return 0
+
+ sys.stdout.flush()
+ sys.stderr.flush()
+ logging.info("Found %i error reports" % len(to_report))
+ for report in to_report:
+ self.error_count += 1
+ logging.info("Report #%d\n%s" % (self.error_count, report))
+ logging.info("Total: %i error reports" % len(to_report))
+ sys.stdout.flush()
+ return -1
+
+
+def main():
+  '''For testing only. The DrMemoryAnalyzer class should be imported instead.'''
+ parser = optparse.OptionParser("usage: %prog <files to analyze>")
+
+ (options, args) = parser.parse_args()
+ if len(args) == 0:
+ parser.error("no filename specified")
+ filenames = args
+
+ logging.getLogger().setLevel(logging.INFO)
+ return DrMemoryAnalyzer().Report(filenames, None, False)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
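To make the Chromium-specific normalization in DrMemoryError.__init__ and the hash printed by ErrorHash easier to follow, here is a small standalone sketch that applies the same transformation to a hypothetical suppression. The sample text below is made up; real inputs come from Dr. Memory's generated *.suppress files.

import hashlib

def normalize_suppression(suppression):
  # Mirrors DrMemoryError.__init__ above: drop the generated name and
  # generalize chrome.dll!... and <anything>.exe!... frames to *!...
  lines = suppression.split("\n")
  for i in range(len(lines)):
    if lines[i].startswith("name="):
      lines[i] = "name=<insert_a_suppression_name_here>"
    if lines[i].startswith("chrome.dll!"):
      lines[i] = lines[i].replace("chrome.dll!", "*!")
    bang_index = lines[i].find("!")
    d_exe_index = lines[i].find(".exe!")
    if bang_index >= 4 and d_exe_index + 4 == bang_index:
      lines[i] = "*" + lines[i][bang_index:]
  return "\n".join(lines)

def error_hash(suppression):
  # Same 64-bit value as DrMemoryError.ErrorHash; the .encode() is only
  # needed on Python 3 and yields the same digest for this ASCII input.
  return int(hashlib.md5(suppression.encode("utf-8")).hexdigest()[:16], 16)

if __name__ == "__main__":
  sample = ("UNINITIALIZED READ\n"
            "name=Error #12\n"
            "unit_tests.exe!SomeHelperFunction\n"
            "chrome.dll!SomeChromeFunction")
  normalized = normalize_suppression(sample)
  print(normalized)
  print("Suppression (error hash=#%016X#)" % error_hash(normalized))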
diff --git a/tools/valgrind/fixed_suppressions.sh b/tools/valgrind/fixed_suppressions.sh
new file mode 100755
index 0000000..d2aae91
--- /dev/null
+++ b/tools/valgrind/fixed_suppressions.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+bugs=$(egrep -o 'bug_[0-9]+' tools/valgrind/memcheck/suppressions.txt |\
+ sed -e 's/bug_//' | sort -n | uniq);
+fixed_status='(Fixed|Verified|Duplicate|FixUnreleased|WontFix|Invalid|IceBox)'
+fixed_status="${fixed_status}</span>"
+for bug in $bugs; do
+ echo "Checking bug #$bug";
+ curl -s "http://code.google.com/p/chromium/issues/detail?id=$bug" |\
+ egrep -q $fixed_status;
+ if [ $? -eq 0 ]; then echo "Bug #$bug seems to be closed (http://crbug.com/$bug)"; fi
+done
diff --git a/tools/valgrind/gdb_helper.py b/tools/valgrind/gdb_helper.py
new file mode 100644
index 0000000..548ee94
--- /dev/null
+++ b/tools/valgrind/gdb_helper.py
@@ -0,0 +1,87 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+''' A bunch of helper functions for querying gdb.'''
+
+import logging
+import os
+import re
+import tempfile
+
+GDB_LINE_RE = re.compile(r'Line ([0-9]*) of "([^"]*)".*')
+
+def _GdbOutputToFileLine(output_line):
+ ''' Parse the gdb output line, return a pair (file, line num) '''
+ match = GDB_LINE_RE.match(output_line)
+ if match:
+ return match.groups()[1], match.groups()[0]
+ else:
+ return None
+
+def ResolveAddressesWithinABinary(binary_name, load_address, address_list):
+ ''' For each address, return a pair (file, line num) '''
+ commands = tempfile.NamedTemporaryFile()
+ commands.write('add-symbol-file "%s" %s\n' % (binary_name, load_address))
+ for addr in address_list:
+ commands.write('info line *%s\n' % addr)
+ commands.write('quit\n')
+ commands.flush()
+ gdb_commandline = 'gdb -batch -x %s 2>/dev/null' % commands.name
+ gdb_pipe = os.popen(gdb_commandline)
+ result = gdb_pipe.readlines()
+
+ address_count = 0
+ ret = {}
+ for line in result:
+ if line.startswith('Line'):
+ ret[address_list[address_count]] = _GdbOutputToFileLine(line)
+ address_count += 1
+ if line.startswith('No line'):
+ ret[address_list[address_count]] = (None, None)
+ address_count += 1
+ gdb_pipe.close()
+ commands.close()
+ return ret
+
+class AddressTable(object):
+ ''' Object to do batched line number lookup. '''
+ def __init__(self):
+ self._load_addresses = {}
+ self._binaries = {}
+ self._all_resolved = False
+
+ def AddBinaryAt(self, binary, load_address):
+ ''' Register a new shared library or executable. '''
+ self._load_addresses[binary] = load_address
+
+ def Add(self, binary, address):
+ ''' Register a lookup request. '''
+ if binary == '':
+ logging.warn('adding address %s in empty binary?' % address)
+ if binary in self._binaries:
+ self._binaries[binary].append(address)
+ else:
+ self._binaries[binary] = [address]
+ self._all_resolved = False
+
+ def ResolveAll(self):
+ ''' Carry out all lookup requests. '''
+ self._translation = {}
+ for binary in self._binaries.keys():
+ if binary != '' and binary in self._load_addresses:
+ load_address = self._load_addresses[binary]
+ addr = ResolveAddressesWithinABinary(
+ binary, load_address, self._binaries[binary])
+ self._translation[binary] = addr
+ self._all_resolved = True
+
+ def GetFileLine(self, binary, addr):
+ ''' Get the (filename, linenum) result of a previously-registered lookup
+ request.
+ '''
+ if self._all_resolved:
+ if binary in self._translation:
+ if addr in self._translation[binary]:
+ return self._translation[binary][addr]
+ return (None, None)
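A minimal usage sketch for the AddressTable helper above: register each binary's load address, queue the addresses to symbolize, then resolve them in one batched gdb run. The binary path, load address, and addresses below are hypothetical placeholders; without gdb on PATH or debug info in the binary, GetFileLine simply returns (None, None).

# Assumes the module above is importable as gdb_helper; all values below
# are placeholders for illustration.
import gdb_helper

table = gdb_helper.AddressTable()
# Register where the binary was mapped (as reported in a tool's log)...
table.AddBinaryAt('/usr/local/bin/mybinary', '0x400000')
# ...queue the addresses we want turned into (file, line) pairs...
table.Add('/usr/local/bin/mybinary', '0x400123')
table.Add('/usr/local/bin/mybinary', '0x400456')
# ...and resolve them all with a single batched gdb invocation.
table.ResolveAll()
for addr in ('0x400123', '0x400456'):
  file_name, line_num = table.GetFileLine('/usr/local/bin/mybinary', addr)
  print(addr, file_name, line_num)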
diff --git a/tools/valgrind/gtest_exclude/OWNERS b/tools/valgrind/gtest_exclude/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/tools/valgrind/gtest_exclude/ash_unittests.gtest-memcheck.txt b/tools/valgrind/gtest_exclude/ash_unittests.gtest-memcheck.txt
new file mode 100644
index 0000000..c981fbb
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/ash_unittests.gtest-memcheck.txt
@@ -0,0 +1,6 @@
+# http://crbug.com/336351
+AutoclickTest.UserInputCancelsAutoclick
+# http://crbug.com/337149
+AutoclickTest.SynthesizedMouseMovesIgnored
+# http://crbug.com/383384
+SystemGestureEventFilterTest.TwoFingerDragDelayed
diff --git a/tools/valgrind/gtest_exclude/aura_unittests.gtest.txt b/tools/valgrind/gtest_exclude/aura_unittests.gtest.txt
new file mode 100644
index 0000000..69ce8dc
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/aura_unittests.gtest.txt
@@ -0,0 +1,2 @@
+# Flaky under Valgrind, see http://crbug.com/348331
+WindowEventDispatcherTest.TouchMovesHeld
diff --git a/tools/valgrind/gtest_exclude/base_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/base_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..cc442bc
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/base_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,38 @@
+# TODO(timurrrr) investigate the failures and enable these tests one-by-one.
+RSA*
+GmockTest.*
+EtwTrace*
+StatsTableTest.*
+ProcessMemoryTest.EnableLFH
+ScopedNativeLibrary.Basic
+# TODO(zhaoqin): investigate the failure and enable it later, see bug 106043
+ConditionVariableTest.LargeFastTaskTest
+# Next test creates a child that crashes, which naturally generates an
+# unaddressable report as well as a handful of leak reports that we don't need
+# to see.
+ProcessUtilTest.GetTerminationStatusCrash
+# See crbug.com/130668
+ProcessUtilTest.GetTerminationStatusKill
+ProcessUtilTest.KillSlowChild
+ProcessUtilTest.SpawnChild
+ScopedProcessInformationTest.Duplicate
+ScopedProcessInformationTest.Swap
+ScopedProcessInformationTest.TakeBoth
+ScopedProcessInformationTest.TakeProcess
+ScopedProcessInformationTest.TakeWholeStruct
+SharedMemoryProcessTest.Tasks
+
+# crbug/144018
+StartupInformationTest.InheritStdOut
+
+# http://crbug.com/308273
+# This only fails occasionally under full mode -- perhaps we perturb timing
+# enough that it hits the same deadlocks it hits under TSan.
+TraceEventTestFixture.TraceContinuousSampling
+
+# http://crbug.com/93843
+# This fails only under full mode.
+# TODO(bruening): add a gtest_exclude distinction for light vs full mode
+# so we can avoid excluding this for light mode.
+MessageLoopTestTypeUI.RecursiveDenial3
+
diff --git a/tools/valgrind/gtest_exclude/base_unittests.gtest.txt b/tools/valgrind/gtest_exclude/base_unittests.gtest.txt
new file mode 100644
index 0000000..aa85c27
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/base_unittests.gtest.txt
@@ -0,0 +1,29 @@
+# This test currently times out in valgrind, see http://crbug.com/9194
+WatchdogTest.AlarmTest
+
+# These tests occasionally hang under Valgrind on Mac (valgrind-darwin r9573).
+# Revisit with a better valgrind.
+# Valgrind bug: https://bugs.kde.org/show_bug.cgi?id=189661
+TimerTest.RepeatingTimer
+TimerTest.RepeatingTimer_Cancel
+
+# Crashes occasionally, see http://crbug.com/7477
+ObserverListThreadSafeTest.CrossThreadObserver
+ObserverListThreadSafeTest.CrossThreadNotifications
+
+# Hangs sometimes on linux, see http://crbug.com/22138
+ClipboardTest.*
+
+# These tests trigger a CHECK so they will leak memory. They don't test
+# anything else, so just disable them on valgrind. Bug 28179.
+OutOfMemoryDeathTest.*
+
+# Flaky under slow tools or just when the VM is under load.
+# See http://crbug.com/43972
+ConditionVariableTest.LargeFastTaskTest
+
+# Flaky under Valgrind, see http://crbug.com/55517
+PlatformFile.TouchGetInfoPlatformFile
+
+# Crashes under Valgrind, see http://crbug.com/355436
+OutOfMemoryHandledTest.Unchecked*
diff --git a/tools/valgrind/gtest_exclude/base_unittests.gtest_mac.txt b/tools/valgrind/gtest_exclude/base_unittests.gtest_mac.txt
new file mode 100644
index 0000000..937bf28
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/base_unittests.gtest_mac.txt
@@ -0,0 +1,20 @@
+# Fails on Valgrind/Mac, see http://crbug.com/43972
+ConditionVariableTest.LargeFastTaskTest
+
+# Fails on Valgrind/Mac due to missing syscall wrapper
+# for the symlink() syscall. See http://crbug.com/44001
+FileUtilTest.NormalizeFilePathSymlinks
+
+# Fails on Valgrind/Mac, see http://crbug.com/53196
+CancellationFlagTest.SetOnDifferentThreadDeathTest
+
+# Fails on Valgrind/Mac, see http://crbug.com/93722
+ProcessMemoryTest.MacTerminateOnHeapCorruption
+
+# Fails on Valgrind/Mac, see http://crbug.com/122080
+ProcessMemoryTest.MacMallocFailureDoesNotTerminate
+
+# Times out on Valgrind/Mac, see http://crbug.com/172044
+MessageLoopTestTypeUI.RecursivePosts
+MessageLoopTestTypeIO.RecursivePosts
+MessageLoopTestTypeDefault.RecursivePosts
diff --git a/tools/valgrind/gtest_exclude/base_unittests.gtest_win-8.txt b/tools/valgrind/gtest_exclude/base_unittests.gtest_win-8.txt
new file mode 100644
index 0000000..1d24cdf
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/base_unittests.gtest_win-8.txt
@@ -0,0 +1,2 @@
+# Fails natively as well: http://crbug.com/251517
+PEImageTest.EnumeratesPE
diff --git a/tools/valgrind/gtest_exclude/base_unittests.gtest_win32.txt b/tools/valgrind/gtest_exclude/base_unittests.gtest_win32.txt
new file mode 100644
index 0000000..840a303
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/base_unittests.gtest_win32.txt
@@ -0,0 +1,12 @@
+# Too slow under Valgrind/Wine and TSan/Windows
+TimeTicks.WinRollover
+
+# Very sensitive to slowdown
+TimeTicks.Deltas
+TimeTicks.HighResNow
+TimerTest.RepeatingTimer*
+
+# This Windows-native sampling profiler test does not work under our tools
+# because it assumes the original code runs, not the modified version
+# with instrumentation. See http://crbug.com/106829
+SamplingProfilerTest.Sample
diff --git a/tools/valgrind/gtest_exclude/blink_heap_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/blink_heap_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..a9376e5
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/blink_heap_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,2 @@
+# crbug.com/396172: flaky under Dr. Memory
+HeapTest.ThreadedWeakness
diff --git a/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory.txt b/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory.txt
new file mode 100644
index 0000000..b7fd0b1
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory.txt
@@ -0,0 +1,18 @@
+# http://crbug.com/400503
+InterstitialUITest.OpenInterstitial
+
+# http://crbug.com/400509
+CustomLauncherPageBrowserTest.LoadPageAndOpenLauncher
+
+# http://crbug.com/403628
+RangeHistoryWebUITest.monthViewGrouped
+
+# http://crbug.com/403687
+ContentScriptCssInjectionTest.ContentScriptInjectsStyles
+MediaFileValidatorTest.UnsupportedExtension
+MessageCenterNotificationsTest.RetrieveBaseParts
+RequirementsCheckerBrowserTest.Check3DExtension
+SSLUITest.TestBadFrameNavigation
+SSLUITest.TestInterstitialJavaScriptProceeds
+SSLUITest.TestRefNavigation
+SSLUITest.TestWSSInvalidCertAndGoForward
diff --git a/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..3f7f30e
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory_win32.txt
@@ -0,0 +1,201 @@
+# TODO(zhaoqin): File bugs for those failing browser tests.
+
+# Dr.Memory i#1052: http://code.google.com/p/drmemory/issues/detail?id=1052
+#
+# The list is too long for gtest_filter, so we exclude the whole
+# test case if any of its tests failed.
+*FLAKY*
+
+# It takes too long to run all browser_tests under Dr. Memory,
+# so we only select a subset to run.
+# A*
+Act*
+Ad*
+All*
+AppA*
+AppB*
+AppL*
+AppW*
+Auto*
+# B*
+BaseD*
+Bac*App*.R*
+Blue*
+Bit*
+Bo*sT*
+Br*erA*
+Br*erC*
+Br*erN*
+Br*erT*
+# C*
+Cal*
+Cap*
+Cas*
+Clou*cy
+Clear*
+Click*
+Com*
+ChromeA*
+ChromeC*
+ChromeR*
+Col*
+Con*UITest.*
+Con*.Re*
+Con*Se*
+Cont*ns
+Context*
+Coo*
+# D*
+Dia*E*
+Dec*
+Dev*
+Dns*
+DoNot*
+Down*Ext*
+Down*UI*
+Do*adTest.*
+Do*tTest.*All
+Drive*
+# E*
+ECK*
+Ed*
+Enc*
+Ep*
+Er*
+Exe*
+Ext*.D*
+Ext*.P*
+Ext*.N*
+Ext*.RS*
+Ext*Api*
+Ext*Man*
+Ext*Se*
+Ext*re*
+Ext*Re*
+Ext*nB*
+Ext*Crx*
+# F*
+FileSys*
+Find*
+Fl*
+Font*gs
+Ftp*
+# G*
+GcdPri*.*Remove
+GcdPri*.*Query
+Gcm*.R*
+GetA*
+Ge*Br*
+# H*
+Hot*
+His*
+HostR*
+Ht*
+# I*
+IE*
+Ide*
+Inv*
+Insp*
+Inl*Safe*
+# J*
+# K*
+# L*
+Lan*
+Lau*
+Laz*
+LoadT*
+Loc*
+Log*hs
+# M*
+MDn*
+Med*B*
+Med*Se*
+Med*Ge*
+Mock*
+MSC_C*
+MSE*
+Man*t.*
+# N*
+NaCl*lib*
+NaCl*Pn*
+NaCl*Vc*
+NetIn*
+Noti*
+NoSe*
+# O*
+Opt*WebUI*
+Out*PPAPITe*
+# P*
+Pag*
+Pas*erBro*
+Pe*
+Plat*Bro*
+Policy*H*
+Port*
+Prefe*
+PrefsF*
+Print*
+Prox*
+Prof*ta
+Prof*t
+Prer*
+PPA*ib*
+PPA*lT*.*
+PPAPIT*.F*
+PPAPIT*.W*
+PPAPIT*.U*3
+# Q*
+QU*
+# R*
+# RangeHistoryWebUITest.*
+Ran*
+Red*
+Ref*
+Reg*
+Rem*De*
+Rem*Ad*
+Res*
+# RequestContentScriptAPITest.PermissionMatcherAgreementInjection
+Req*.Per*ion
+# S*
+Sa*
+Sea*Pr*
+Ses*
+Set*
+Scr*
+Soc*
+Spe*
+SRC*.Pl*
+SRC*.Fr*
+SSL*ed
+Sup*Mo*
+Str*
+SyncF*
+SyncInt*
+Sys*Di*
+Sys*In*
+Sys*Ne*
+# T*
+Tab*
+Tas*
+Tran*Bu*
+Tran*Up*
+Tran*ag
+# U*
+Us*
+Un*.Br*
+# V*
+# W*
+WebViewT*
+WebRtcB*
+WebUIA*
+WebUIBr*
+WebC*
+WebN*
+WebstoreIn*
+WebstoreSt*
+WebSo*
+Win*.Cl*
+# X*
+# Y*
+# Z*
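The exclusion lists in this patch, including the long wildcard list above, are plain test-name patterns, one per line, with '#' comments. They are consumed by the test harness (chrome_tests.py, which is not part of this hunk); one plausible way such a file becomes a gtest argument is to join the non-comment lines into a negative --gtest_filter, roughly as in this illustrative sketch.

# Illustrative only; the real conversion lives in chrome_tests.py, which is
# not shown in this patch.
import sys

def gtest_filter_from_exclude_file(path):
  patterns = []
  with open(path) as f:
    for line in f:
      line = line.strip()
      if line and not line.startswith('#'):
        patterns.append(line)
  # In gtest, patterns after '-' are excluded and ':' separates patterns.
  return '--gtest_filter=-' + ':'.join(patterns)

if __name__ == '__main__':
  print(gtest_filter_from_exclude_file(sys.argv[1]))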
diff --git a/tools/valgrind/gtest_exclude/browser_tests.gtest-memcheck.txt b/tools/valgrind/gtest_exclude/browser_tests.gtest-memcheck.txt
new file mode 100644
index 0000000..5abd383
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/browser_tests.gtest-memcheck.txt
@@ -0,0 +1,60 @@
+# Don't run FLAKY or FAILS ui tests under Valgrind.
+# They tend to generate way too many flaky Valgrind reports.
+*FLAKY_*
+*FAILS_*
+
+# NaCl tests fail with Data Execution Prevention error http://crbug.com/104517
+NaClGdbTest.Empty
+PPAPINaClGLibcTest.*
+PPAPINaClNewlibTest.*
+PPAPINaClTest*
+
+# http://crbug.com/109336
+OutOfProcessPPAPITest.View_PageHideShow
+
+# TODO(thestig) File bugs for these failing browser tests.
+AllUrlsApiTest.WhitelistedExtension
+AppBackgroundPageApiTest.NoJsManifestBackgroundPage
+BrowserCloseTest.DownloadsCloseCheck_2
+BrowserCloseTest.DownloadsCloseCheck_5
+BrowserEncodingTest.SLOW_TestEncodingAliasMapping
+BrowserNavigatorTest.Disposition_Bookmarks_DoNothingIfIncognitoIsForced
+BrowserNavigatorTest.Disposition_Incognito
+BrowserNavigatorTest.Disposition_SyncPromo_DoNothingIfIncognitoIsForced
+BrowserTest.ForwardDisabledOnForward
+ClickToPlayPluginTest.Basic
+ClickToPlayPluginTest.LoadAllBlockedPlugins
+ClickToPlayPluginTest.NoCallbackAtLoad
+DevToolsExperimentalExtensionTest.TestDevToolsExperimentalExtensionAPI
+DevToolsExtensionTest.TestDevToolsExtensionMessaging
+DownloadExtensionTest.DownloadExtensionTest_FileIcon_Active
+DownloadExtensionTest.DownloadExtensionTest_FileIcon_History
+DownloadExtensionTest.DownloadExtensionTest_SearchPauseResumeCancelGetFileIconIncognito
+DownloadExtensionTestIncognito.DownloadExtensionTest_SearchPauseResumeCancelGetFileIconIncognito
+ErrorPageTest.DNSError_Basic
+ErrorPageTest.DNSError_GoBack1
+ExecuteScriptApiTest.ExecuteScriptPermissions
+ExtensionApiTest.FontSettingsIncognito
+ExtensionApiTest.PopupBlockingExtension
+ExtensionApiTest.PopupBlockingHostedApp
+FastShutdown.SlowTermination
+IndexedDBLayoutTest.IndexTests
+NetInternalsTest.netInternalsPrerenderViewFail
+NewTabUIBrowserTest.LoadNTPInExistingProcess
+OutOfProcessPPAPITest.NetAddressPrivate_GetAnyAddress
+OutOfProcessPPAPITest.NetAddressPrivate_ReplacePort
+PageCyclerCachedBrowserTest.PlaybackMode
+PageCyclerCachedBrowserTest.URLNotInCache
+PPAPITest.ImeInputEvent
+PrerenderBrowserTest.*
+PrerenderBrowserTestWithNaCl.PrerenderNaClPluginEnabled
+PrintPreviewWebUITest.TestPrinterList
+PrintPreviewWebUITest.TestPrinterListCloudEmpty
+PrintPreviewWebUITest.TestSectionsDisabled
+PrintWebViewHelperTest.BlockScriptInitiatedPrinting
+SafeBrowsingInterstitialVersions/SafeBrowsingBlockingPageBrowserTest.MalwareDontProceed*
+SafeBrowsingInterstitialVersions/SafeBrowsingBlockingPageBrowserTest.ProceedDisabled*
+SocketApiTest.SocketTCPExtension
+SocketApiTest.SocketUDPExtension
+SSLUITest.TestWSSInvalidCertAndGoForward
+WebViewTest.Shim
diff --git a/tools/valgrind/gtest_exclude/cc_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/cc_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..777e3b5
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/cc_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,12 @@
+# DR-i#1476
+LayerTreeHostAnimationTestNoBackgroundTickingWithoutActiveTree.RunSingleThread_DirectRenderer
+LayerTreeHostBlendingPixelTest.*
+LayerTreeHostFiltersPixelTest.*
+SchedulerTest.*
+LayerTreeHostTestContinuousPainting.*
+LayerTreeHostAnimationTestContinuousAnimate.RunMultiThread_DirectRenderer_ImplSidePaint
+LayerTreeHostAnimationTestContinuousAnimate.RunMultiThread_DirectRenderer_MainThreadPaint
+
+# http://crbug.com/416643
+LayerTreeHostCopyRequestTestMultipleRequests.GLRenderer_RunSingleThread
+LayerTreeHostCopyRequestTestMultipleRequests.SoftwareRenderer_RunSingleThread
diff --git a/tools/valgrind/gtest_exclude/components_unittests.gtest.txt b/tools/valgrind/gtest_exclude/components_unittests.gtest.txt
new file mode 100644
index 0000000..8863624
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/components_unittests.gtest.txt
@@ -0,0 +1,2 @@
+# Flaky, see http://crbug.com/420023
+WebDataServiceAutofillTest.ProfileUpdate
diff --git a/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory.txt b/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory.txt
new file mode 100644
index 0000000..9feec33
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory.txt
@@ -0,0 +1,11 @@
+# crbug.com/399289
+WebRtcBrowserTest.NoCrashWhenConnectChromiumSinkToRemoteTrack
+
+# crbug.com/400490
+PluginTest.PluginSingleRangeRequest
+PluginTest.PluginThreadAsyncCall
+PluginTest.PrivateDisabled
+PluginTest.ScheduleTimer
+
+# crbug.com/419396
+WebRtcBrowserTest.CallWithDataAndMedia
diff --git a/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..cd8ecc4
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt
@@ -0,0 +1,44 @@
+# crbug.com/389132
+WebRtcAecDumpBrowserTest.*WithAecDump*
+
+# http://code.google.com/p/drmemory/issues/detail?id=1528
+# Un-analyzed test failures:
+DeviceInertialSensorBrowserTest.MotionNullTestWithAlert
+DeviceInertialSensorBrowserTest.OrientationNullTestWithAlert
+File/MediaTest.VideoTulipWebm/0
+Http/MediaTest.VideoBearTheora/0
+Http/MediaTest.VideoTulipWebm/0
+IndexedDBBrowserTest.DeleteCompactsBackingStore
+MSE_ClearKey/EncryptedMediaTest.FrameSizeChangeVideo/0
+OutOfProcessPPAPITest.MediaStreamAudioTrack
+OutOfProcessPPAPITest.NetworkProxy
+OutOfProcessPPAPITest.VideoDecoder
+RenderViewImplTest.DontIgnoreBackAfterNavEntryLimit
+RenderViewImplTest.ReloadWhileSwappedOut
+RenderViewImplTest.SendSwapOutACK
+RenderViewImplTest.StaleNavigationsIgnored
+ResourceFetcherTests.ResourceFetcher404
+ResourceFetcherTests.ResourceFetcherDidFail
+ResourceFetcherTests.ResourceFetcherDownload
+ResourceFetcherTests.ResourceFetcherPost
+ResourceFetcherTests.ResourceFetcherSetHeader
+SitePerProcessBrowserTest.CrashSubframe
+SRC_ClearKey/EncryptedMediaTest.FrameSizeChangeVideo/0
+SRC_ClearKey/EncryptedMediaTest.Playback_AudioClearVideo_WebM/0
+SRC_ClearKey/EncryptedMediaTest.Playback_VideoAudio_WebM/0
+WebRtcBrowserTest.CallAndVerifyVideoMutingWorks
+WebRtcBrowserTest.CallWithAecDump
+WebRtcBrowserTest.CallWithAecDumpEnabledThenDisabled
+WebRtcBrowserTest.CallWithDataOnly
+WebRtcBrowserTest.CallWithNewVideoMediaStream
+WebRtcBrowserTest.CallWithSctpDataAndMedia
+WebRtcBrowserTest.CallWithSctpDataOnly
+WebRtcBrowserTest.CanSetupAudioAndVideoCall
+WebRtcBrowserTest.CanSetupAudioAndVideoCallWithoutMsidAndBundle
+WebRtcBrowserTest.CanSetupDefaultVideoCall
+WebRtcBrowserTest.CanSetupVideoCallAndDisableLocalVideo
+WebRtcBrowserTest.CanSetupLegacyCall
+WebRtcBrowserTest.CanSetupVideoCallWith16To9AspectRatio
+WebRtcBrowserTest.CanSetupVideoCallWith1To1AspectRatio
+WebRtcBrowserTest.CanSetupVideoCallWith4To3AspectRatio
+WebRtcBrowserTest.NegotiateOfferWithBLine
diff --git a/tools/valgrind/gtest_exclude/content_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/content_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..eff95c6
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/content_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,9 @@
+# Crashes: http://crbug.com/383054
+BrowserAccessibilityManagerWinTest.TestAccessibleHWND
+
+# http://crbug.com/391513
+GamepadServiceTest.ConnectionsTest
+
+# http://crbug.com/418748
+DownloadFile/DownloadFileTestWithRename.RenameWithErrorRetry/0
+DownloadFile/DownloadFileTestWithRename.RenameWithErrorRetry/1
diff --git a/tools/valgrind/gtest_exclude/content_unittests.gtest.txt b/tools/valgrind/gtest_exclude/content_unittests.gtest.txt
new file mode 100644
index 0000000..0431d8c
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/content_unittests.gtest.txt
@@ -0,0 +1,12 @@
+# Flaky, see http://crbug.com/227278
+WebContentsVideoCaptureDeviceTest.WebContentsDestroyed
+
+# False positive, according to crbug.com/386080
+WebLayerImplFixedBoundsTest.CompareToWebLayerImplComplex
+WebLayerImplFixedBoundsTest.CompareToWebLayerImplSimple
+
+# Flaky http://crbug.com/413775
+ServiceWorkerCacheStorageManagerTests/ServiceWorkerCacheStorageManagerTestP.RecreateCacheOnDemand/1
+
+# http://crbug.com/418258
+DevToolsManagerTest.TestObserver
diff --git a/tools/valgrind/gtest_exclude/content_unittests.gtest_mac.txt b/tools/valgrind/gtest_exclude/content_unittests.gtest_mac.txt
new file mode 100644
index 0000000..e82512b
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/content_unittests.gtest_mac.txt
@@ -0,0 +1,68 @@
+# http://crbug.com/93245
+GeolocationGatewayDataProviderCommonTest.*
+GeolocationWifiDataProviderCommonTest.*
+
+# Fail/crash, see http://crbug.com/151939
+WebDragDestTest.URL
+WebDragDestTest.Data
+WebDragSourceMacTest.DragInvalidlyEscapedBookmarklet
+
+# Fail, see http://crbug.com/153007
+MacSandboxTest.ClipboardAccess
+
+# mach_override assertion, see http://crbug.com/162728
+BlobURLRequestJobTest.*
+
+# Fail, see http://crbug.com/159234
+WebContentsVideoCaptureDeviceTest.GoesThroughAllTheMotions
+WebContentsVideoCaptureDeviceTest.BadFramesGoodFrames
+
+# Hang at arbitrary point, can't tell where exactly, see http://crbug.com/163314
+RenderWidgetHostViewMacTest.*
+WebContentsVideoCaptureDeviceTest.*
+RenderViewHostTest.*
+DeviceMotionEventPumpTest.*
+
+# Speculative disable of hanging tests. http://crbug.com/241919
+VideoCaptureControllerTest.*
+VideoCaptureHostTest.*
+
+# Hangs under Valgrind, see http://crbug.com/244257
+SmoothScrollGestureControllerTest.Tick
+
+# http://crbug.com/247163
+VideoCaptureManagerTest.CloseWithoutStop
+VideoCaptureManagerTest.CreateAndClose
+VideoCaptureManagerTest.StartUsingId
+
+# http://crbug.com/247601
+FontSerializationTest.StyledFonts
+MacSandboxTest.FontLoadingTest
+VideoCaptureManagerTest.CreateAndAbort
+VideoCaptureManagerTest.ManipulateDeviceAndCheckCapabilities
+VideoCaptureManagerTest.StartDeviceAndGetDeviceFormatInUse
+
+# http://crbug.com/280583
+DesktopCaptureDeviceTest.ScreenResolutionChangeVariableResolution
+DesktopCaptureDeviceTest.Capture
+
+# http://crbug.com/303709
+GamepadProviderTest.PollingAccess
+GamepadProviderTest.UserGesture
+
+# 'impossible' happened. http://crbug.com/365715, crbug.com/51716
+AppCacheRequestHandlerTest.*
+AppCacheResponseTest.*
+AppCacheStorageImplTest.*
+AppCacheUpdateJobTest.AlreadyChecking
+AppCacheUpdateJobTest.ManifestMissingMimeTypeTest
+
+# http://crbug.com/391513
+GamepadServiceTest.*
+
+# http://crbug.com/412903
+HostVarTrackerTest.DeleteObjectVarWithInstance
+
+# http://crbug.com/417582
+OneWriterSeqLockTest.ManyThreads
diff --git a/tools/valgrind/gtest_exclude/extensions_unittests.gtest-memcheck.txt b/tools/valgrind/gtest_exclude/extensions_unittests.gtest-memcheck.txt
new file mode 100644
index 0000000..de74c1f
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/extensions_unittests.gtest-memcheck.txt
@@ -0,0 +1,4 @@
+# These contain un-analyzed test failures and leaks
+# http://crbug.com/402257
+ApiTestBaseTest.*
+SerialApiTest.*
diff --git a/tools/valgrind/gtest_exclude/gin_unittests.gtest-drmemory.txt b/tools/valgrind/gtest_exclude/gin_unittests.gtest-drmemory.txt
new file mode 100644
index 0000000..9fcc7cc
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/gin_unittests.gtest-drmemory.txt
@@ -0,0 +1,2 @@
+# http://crbug.com/398549
+GinShellTest.HelloWorld
diff --git a/tools/valgrind/gtest_exclude/interactive_ui_tests.gtest.txt b/tools/valgrind/gtest_exclude/interactive_ui_tests.gtest.txt
new file mode 100644
index 0000000..6ae761f
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/interactive_ui_tests.gtest.txt
@@ -0,0 +1,34 @@
+# These tests fail due to mmap Valgrind failures, see http://crbug.com/66677
+CollectedCookiesTest.DoubleDisplay
+CollectedCookiesTest.NavigateAway
+InfoBarsUITest.TestInfoBarsCloseOnNewTheme
+FastShutdown.SlowTermination
+MouseLeaveTest.TestOnMouseOut
+NotificationsPermissionTest.TestNoUserGestureInfobar
+NotificationsPermissionTest.TestUserGestureInfobar
+
+# These tests fail due to timeouts or limited buildslave support;
+# http://crbug.com/67301
+BrowserFocusTest.InterstitialFocus
+BrowserFocusTest.FindFocusTest
+BrowserFocusTest.FocusTraversalOnInterstitial
+
+# Don't run FLAKY or FAILS tests under Valgrind and TSan
+# as they tend to generate too many reports, see http://crbug.com/67959
+# NB: Can't use FAILS_/FLAKY_ as it will be turned into *.* by chrome_tests.py!
+*.FLAKY*
+*.FAILS*
+
+# Fails under Valgrind, see http://crbug.com/68068
+DevToolsSanityTest.TestPauseWhenScriptIsRunning
+
+# These tests time out under Valgrind, see http://crbug.com/163880
+BrowserFocusTest.FocusOnReload
+CommandsApiTest.Basic
+ExtensionApiTest.NotificationsHasPermissionManifest
+ExtensionCrashRecoveryTest.ReloadTabsWithBackgroundPage
+ExtensionCrashRecoveryTest.TwoExtensionsCrashBothAtOnce
+ExtensionCrashRecoveryTest.TwoExtensionsCrashFirst
+ExtensionCrashRecoveryTest.TwoExtensionsOneByOne
+FullscreenControllerInteractiveTest.TestTabExitsMouseLockOnNavigation
+OmniboxViewTest.Escape
diff --git a/tools/valgrind/gtest_exclude/ipc_tests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/ipc_tests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..ac62a9a
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/ipc_tests.gtest-drmemory_win32.txt
@@ -0,0 +1,2 @@
+# TODO(timurrrr): investigate
+IPCSyncChannelTest.*
diff --git a/tools/valgrind/gtest_exclude/ipc_tests.gtest.txt b/tools/valgrind/gtest_exclude/ipc_tests.gtest.txt
new file mode 100644
index 0000000..30a1f89
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/ipc_tests.gtest.txt
@@ -0,0 +1,6 @@
+# Takes 27-40 seconds to run.
+IPCSyncChannelTest.ChattyServer
+# Hangs on Linux sometimes. See http://crbug.com/22141
+IPCChannelTest.ChannelTest
+# Crashes under Valgrind. See http://crbug.com/46782
+IPCSyncChannelTest.Multiple
diff --git a/tools/valgrind/gtest_exclude/libphonenumber_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/libphonenumber_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..4d2b324
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/libphonenumber_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,3 @@
+# These tests fail in a native run as well.
+PhoneNumberMatcherTest.MatchesWithStrictGroupingLeniency
+PhoneNumberMatcherTest.MatchesWithExactGroupingLeniency
diff --git a/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..3152445
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,4 @@
+# Hangs under Dr. Memory
+# http://code.google.com/p/drmemory/issues/detail?id=978
+WinAudioTest.SyncSocketBasic
+AudioBusTest.CopyTo
diff --git a/tools/valgrind/gtest_exclude/media_unittests.gtest.txt b/tools/valgrind/gtest_exclude/media_unittests.gtest.txt
new file mode 100644
index 0000000..368398e
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/media_unittests.gtest.txt
@@ -0,0 +1,12 @@
+# This test tries to record fake audio in real-time.
+# This appears to be too sensitive to slowdown, see http://crbug.com/49497
+FakeAudioInputTest.BasicCallbacks
+
+# Flaky under all Valgrind-based tools, see http://crbug.com/298771
+PipelineIntegrationTest.MediaSource_Opus_Seeking_WebM
+
+# crbug.com/409485: cannot revert due to the git migration,
+# so exclude these tests for now.
+AudioInputTest.Record
+MacAudioInputTest.AUAudioInputStreamVerifyStereoRecording
+AUHALStreamTest.CreateOpenStartStopClose
diff --git a/tools/valgrind/gtest_exclude/media_unittests.gtest_mac.txt b/tools/valgrind/gtest_exclude/media_unittests.gtest_mac.txt
new file mode 100644
index 0000000..0109aeb
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/media_unittests.gtest_mac.txt
@@ -0,0 +1,6 @@
+# Crashes under Valgrind on Mac, http://crbug.com/247601
+VideoCaptureDeviceTest.FakeCapture
+VideoCaptureDeviceTest.FakeCaptureVariableResolution
+
+# Times out under Valgrind, http://crbug.com/389087
+VideoFrameSchedulerImplTest.EventualDisplay
diff --git a/tools/valgrind/gtest_exclude/message_center_unittests.gtest.txt b/tools/valgrind/gtest_exclude/message_center_unittests.gtest.txt
new file mode 100644
index 0000000..56bb83c
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/message_center_unittests.gtest.txt
@@ -0,0 +1,3 @@
+# Fails http://crbug.com/256911
+MessageCenterImplTest.PopupTimersControllerResetTimer
+MessageCenterImplTest.PopupTimersControllerStartMultipleTimersPause
diff --git a/tools/valgrind/gtest_exclude/mojo_application_manager_unittests.gtest-drmemory.txt b/tools/valgrind/gtest_exclude/mojo_application_manager_unittests.gtest-drmemory.txt
new file mode 100644
index 0000000..bb80765
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/mojo_application_manager_unittests.gtest-drmemory.txt
@@ -0,0 +1,2 @@
+# http://crbug.com/416620
+ApplicationManagerTest.*
diff --git a/tools/valgrind/gtest_exclude/mojo_system_unittests.gtest-drmemory.txt b/tools/valgrind/gtest_exclude/mojo_system_unittests.gtest-drmemory.txt
new file mode 100644
index 0000000..4f2a73c
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/mojo_system_unittests.gtest-drmemory.txt
@@ -0,0 +1,2 @@
+# bug_372452
+RemoteMessagePipeTest.HandlePassing
diff --git a/tools/valgrind/gtest_exclude/mojo_view_manager_unittests.gtest-drmemory.txt b/tools/valgrind/gtest_exclude/mojo_view_manager_unittests.gtest-drmemory.txt
new file mode 100644
index 0000000..3ca77e9
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/mojo_view_manager_unittests.gtest-drmemory.txt
@@ -0,0 +1,2 @@
+# http://crbug.com/416621
+ViewManagerTest.*
diff --git a/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win-xp.txt b/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win-xp.txt
new file mode 100644
index 0000000..46717dc
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win-xp.txt
@@ -0,0 +1,3 @@
+# http://code.google.com/p/drmemory/issues/detail?id=842
+# Failing and then crashing.
+HttpNetworkTransationSpdy21Test.HttpsProxySpdy*
diff --git a/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..465d258
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,33 @@
+# See http://crbug.com/82391
+URLRequestTestHTTP.HTTPSToHTTPRedirectNoRefererTest
+
+# Times out. See http://crbug.com/134313
+URLRequestTestHTTP.GetTest_ManyCookies
+
+# Dr. Memory hits an assertion:
+# http://code.google.com/p/drmemory/issues/detail?id=422
+HttpAuthTest.*
+HttpAuthHandlerFactoryTest.*
+X509CertificateTest.*
+
+# Too many uninits and too slow. TODO(timurrrr): investigate uninits
+ProxyResolverV8Test.*
+
+# Slow
+CookieMonsterTest.GarbageCollectionTriggers
+
+# Hangs only when built in release mode.
+# http://crbug.com/105762
+ClientSocketPoolBaseTest.DisableCleanupTimer
+
+# Flaky, see http://crbug.com/108422
+SSLClientSocketTest.*
+
+# DrM-i#1465
+URLRequestTest.ResolveShortcutTest
+
+# Crashes, http://crbug.com/369840
+ProxyResolverV8TracingTest.MultipleResolvers
+
+# http://crbug.com/400521
+HttpServerTest.RequestWithTooLargeBody
diff --git a/tools/valgrind/gtest_exclude/net_unittests.gtest-memcheck.txt b/tools/valgrind/gtest_exclude/net_unittests.gtest-memcheck.txt
new file mode 100644
index 0000000..87a66ec
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/net_unittests.gtest-memcheck.txt
@@ -0,0 +1,29 @@
+# These tests leak data intentionally, so are inappropriate for Valgrind tests.
+# Similar list in ../purify/net_unittests.exe.gtest.txt
+# TODO(dkegel): either merge the two files or keep them in sync,
+# see http://code.google.com/p/chromium/issues/detail?id=8951
+DiskCacheBackendTest.AppCacheInvalidEntry
+DiskCacheBackendTest.AppCacheInvalidEntryRead
+DiskCacheBackendTest.AppCacheInvalidEntryWithLoad
+DiskCacheBackendTest.InvalidEntry
+DiskCacheBackendTest.InvalidEntryRead
+DiskCacheBackendTest.InvalidEntryWithLoad
+DiskCacheBackendTest.TrimInvalidEntry
+DiskCacheBackendTest.TrimInvalidEntry2
+DiskCacheBackendTest.InvalidEntryEnumeration
+DiskCacheBackendTest.NewEvictionInvalidEntry
+DiskCacheBackendTest.NewEvictionInvalidEntryRead
+DiskCacheBackendTest.NewEvictionInvalidEntryWithLoad
+DiskCacheBackendTest.NewEvictionTrimInvalidEntry
+DiskCacheBackendTest.NewEvictionTrimInvalidEntry2
+DiskCacheBackendTest.NewEvictionInvalidEntryEnumeration
+DiskCacheBackendTest.ShutdownWithPendingCreate_Fast
+DiskCacheBackendTest.ShutdownWithPendingFileIO_Fast
+DiskCacheBackendTest.ShutdownWithPendingIO_Fast
+
+# flaky failure on Linux Tests (valgrind)(2),
+# see http://code.google.com/p/chromium/issues/detail?id=117196
+SSLClientSocketTest.VerifyReturnChainProperlyOrdered
+
+# Flaky: http://crbug.com/374871
+EndToEndTests/EndToEndTest.LargePostSmallBandwidthLargeBuffer*
diff --git a/tools/valgrind/gtest_exclude/net_unittests.gtest.txt b/tools/valgrind/gtest_exclude/net_unittests.gtest.txt
new file mode 100644
index 0000000..03c0fed
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/net_unittests.gtest.txt
@@ -0,0 +1,17 @@
+# Very slow under Valgrind.
+KeygenHandlerTest.*SmokeTest
+KeygenHandlerTest.*ConcurrencyTest
+
+# Hangs, see http://crbug.com/61908
+DirectoryListerTest.BigDirRecursiveTest
+
+# http://crbug.com/88228
+SSLClientSocketTest.Connect
+SSLClientSocketTest.ConnectClientAuthSendNullCert
+
+# Fails flakily. http://crbug.com/255775
+SimpleIndexFileTest.WriteThenLoadIndex
+
+# Flaky. http://crbug.com/237450
+DiskCacheBackendTest.SimpleCacheAppCacheOnlyDoomAll
+DiskCacheBackendTest.SimpleCacheDoomAll
diff --git a/tools/valgrind/gtest_exclude/net_unittests.gtest_linux.txt b/tools/valgrind/gtest_exclude/net_unittests.gtest_linux.txt
new file mode 100644
index 0000000..a93d588
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/net_unittests.gtest_linux.txt
@@ -0,0 +1,5 @@
+# Flaky. crbug.com/234776
+DiskCacheEntryTest.SimpleCacheStreamAccess
+DiskCacheEntryTest.SimpleCacheGrowData
+DiskCacheEntryTest.SimpleCacheSizeChanges
+
diff --git a/tools/valgrind/gtest_exclude/net_unittests.gtest_mac.txt b/tools/valgrind/gtest_exclude/net_unittests.gtest_mac.txt
new file mode 100644
index 0000000..6986bdf
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/net_unittests.gtest_mac.txt
@@ -0,0 +1,6 @@
+# Very slow under Valgrind (see <http://crbug.com/37289>).
+KeygenHandlerTest.SmokeTest
+
+# These tests fail under Valgrind on Mac, see http://crbug.com/62314
+SSLClientSocketTest.*
+HTTPSRequestTest.*
diff --git a/tools/valgrind/gtest_exclude/printing_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/printing_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..58a6a8d
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/printing_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,3 @@
+# CreateDC returns NULL, see http://crbug.com/73652
+PrintingContextTest.Base
+PrintingContextTest.PrintAll
diff --git a/tools/valgrind/gtest_exclude/remoting_unittests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/remoting_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..761a3a0
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/remoting_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,12 @@
+# These tests fail on an assertion, see http://crbug.com/57266
+EncoderVp8Test.TestEncoder
+DecoderVp8Test.EncodeAndDecode
+
+# These tests intentionally generate exceptions to verify that a dump is
+# generated during the crash.
+BreakpadWinDeathTest.TestAccessViolation
+BreakpadWinDeathTest.TestInvalidParameter
+BreakpadWinDeathTest.TestDebugbreak
+
+# DrM-i#1465
+RdpClientTest.Basic
diff --git a/tools/valgrind/gtest_exclude/remoting_unittests.gtest.txt b/tools/valgrind/gtest_exclude/remoting_unittests.gtest.txt
new file mode 100644
index 0000000..c322014
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/remoting_unittests.gtest.txt
@@ -0,0 +1,2 @@
+# http://crbug.com/241856
+VideoSchedulerTest.StartAndStop
diff --git a/tools/valgrind/gtest_exclude/remoting_unittests.gtest_win-8.txt b/tools/valgrind/gtest_exclude/remoting_unittests.gtest_win-8.txt
new file mode 100644
index 0000000..eaf36f8
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/remoting_unittests.gtest_win-8.txt
@@ -0,0 +1,2 @@
+# Fails natively as well: http://crbug.com/251517
+RdpClientTest.Basic
diff --git a/tools/valgrind/gtest_exclude/safe_browsing_tests.gtest_mac.txt b/tools/valgrind/gtest_exclude/safe_browsing_tests.gtest_mac.txt
new file mode 100644
index 0000000..3afb727
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/safe_browsing_tests.gtest_mac.txt
@@ -0,0 +1,2 @@
+# Fails on Valgrind/Mac, see http://crbug.com/69280
+SafeBrowsingServiceTest.SafeBrowsingSystemTest
diff --git a/tools/valgrind/gtest_exclude/sandbox_linux_unittests.gtest.txt b/tools/valgrind/gtest_exclude/sandbox_linux_unittests.gtest.txt
new file mode 100644
index 0000000..ffb2857
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/sandbox_linux_unittests.gtest.txt
@@ -0,0 +1,5 @@
+# This test intentionally crashes with a NULL deref.
+UnitTests.SEGVDeathWithMessage
+# http://crbug.com/407357
+SandboxBPF.StartSingleThreadedAsMultiThreaded
+SandboxBPF.StartMultiThreadedAsSingleThreaded
diff --git a/tools/valgrind/gtest_exclude/suppressions.txt b/tools/valgrind/gtest_exclude/suppressions.txt
new file mode 100644
index 0000000..e8cc210
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/suppressions.txt
@@ -0,0 +1,39 @@
+{
+ Test DiskCacheBackendTest.InvalidEntryEnumeration leaks.
+ Memcheck:Leak
+ fun:_Znwj
+ fun:_ZN10disk_cache12StorageBlockINS_12RankingsNodeEE12AllocateDataEv
+ fun:_ZN10disk_cache12StorageBlockINS_12RankingsNodeEE4LoadEv
+ fun:_ZN10disk_cache9EntryImpl15LoadNodeAddressEv
+ fun:_ZN10disk_cache11BackendImpl8NewEntryENS_4AddrEPPNS_9EntryImplEPb
+ fun:_ZN10disk_cache11BackendImpl10MatchEntryERKSsjb
+ fun:_ZN10disk_cache11BackendImpl9OpenEntryERKSsPPNS_5EntryE
+ fun:_ZN49DiskCacheBackendTest_InvalidEntryEnumeration_Test8TestBodyEv
+ fun:_ZN7testing4Test3RunEv
+}
+{
+ Test DiskCacheBackendTest.InvalidEntryRead leaks.
+ Memcheck:Leak
+ fun:_Znwj
+ fun:_ZN10disk_cache11BackendImpl8NewEntryENS_4AddrEPPNS_9EntryImplEPb
+ fun:_ZN10disk_cache11BackendImpl10MatchEntryERKSsjb
+ fun:_ZN10disk_cache11BackendImpl9OpenEntryERKSsPPNS_5EntryE
+ fun:_ZN42DiskCacheBackendTest_InvalidEntryRead_Test8TestBodyEv
+ fun:_ZN7testing4Test3RunEv
+}
+{
+ Test DiskCacheBackendTest.InvalidEntryWithLoad leaks.
+ Memcheck:Leak
+ fun:_Znwj
+ fun:_ZN10disk_cache11BackendImpl11CreateEntryERKSsPPNS_5EntryE
+ fun:_ZN46DiskCacheBackendTest_InvalidEntryWithLoad_Test8TestBodyEv
+ fun:_ZN7testing4Test3RunEv
+}
+{
+ Test FlipNetworkTransactionTest.WriteError Bug 29004
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3net26FlipNetworkTransactionTest17TransactionHelperERKNS_15HttpRequestInfoEPNS_17DelayedSocketDataE
+ fun:_ZN3net42FlipNetworkTransactionTest_WriteError_Test8TestBodyEv
+}
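
The fun: frames in the suppressions above are Itanium-ABI mangled C++ symbols. When a frame needs to be read or updated, it can be demangled with the standard binutils c++filt tool; for example, from Python:

    import subprocess

    # Demangle one of the fun: frames above with the standard c++filt tool.
    mangled = '_ZN10disk_cache9EntryImpl15LoadNodeAddressEv'
    p = subprocess.Popen(['c++filt'], stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE)
    out, _ = p.communicate(mangled.encode())
    print(out.decode().strip())  # disk_cache::EntryImpl::LoadNodeAddress()
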
diff --git a/tools/valgrind/gtest_exclude/sync_unit_tests.gtest-asan.txt b/tools/valgrind/gtest_exclude/sync_unit_tests.gtest-asan.txt
new file mode 100644
index 0000000..fc2cc8e
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/sync_unit_tests.gtest-asan.txt
@@ -0,0 +1,2 @@
+# Flaky, see http://crbug.com/118370
+SyncSchedulerTest.TransientPollFailure
diff --git a/tools/valgrind/gtest_exclude/ui_unittests.gtest-memcheck.txt b/tools/valgrind/gtest_exclude/ui_unittests.gtest-memcheck.txt
new file mode 100644
index 0000000..7c5a4be
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/ui_unittests.gtest-memcheck.txt
@@ -0,0 +1,8 @@
+# http://crbug.com/222606
+RenderTextTest.DisplayRectShowsCursorLTR
+
+# http://crbug.com/370168
+TouchExplorationTest.*
+
+# http://crbug.com/414191
+MenuControllerTest.OpenClose
diff --git a/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win-xp.txt b/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win-xp.txt
new file mode 100644
index 0000000..2ef9d50
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win-xp.txt
@@ -0,0 +1,7 @@
+# Crashing (!) since forever, needs analysis.
+BookmarkNodeDataTest.*
+
+# http://code.google.com/p/drmemory/issues/detail?id=842
+# Fails assertion. App data corrupted by DrMemory?
+JsonSchemaTest.TestType
+JsonSchemaTest.TestNumber
diff --git a/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win32.txt
new file mode 100644
index 0000000..7e28846
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win32.txt
@@ -0,0 +1,81 @@
+##################################################
+# known Dr. Memory bugs:
+
+# http://code.google.com/p/drmemory/issues/detail?id=318
+AudioRendererHostTest.*
+
+##################################################
+# un-analyzed Dr. Memory bugs:
+
+# http://code.google.com/p/drmemory/issues/detail?id=979
+FirefoxProfileImporterTest.Firefox35Importer
+
+# http://code.google.com/p/drmemory/issues/detail?id=980
+MetricsLogManagerTest.*
+
+##################################################
+# Chromium bugs:
+
+# times out on the bot
+# http://crbug.com/87887
+VideoCaptureHostTest.*
+
+# crashes due to use-after-frees, http://crbug.com/90980
+FirefoxImporterTest.Firefox*NSS3Decryptor
+
+# fails http://crbug.com/92144
+ServiceProcessStateTest.ForceShutdown
+
+# fails sporadically: http://crbug.com/108205
+MultiProcessLockTest.RecursiveLock
+
+# Poor isolation, DCHECKs when no MessageLoop exists. Breaks when sharded.
+# http://crbug.com/117679
+WebsiteSettingsModelTest.*
+
+# fails to create thread
+# http://crbug.com/144087
+DesktopNotificationServiceTest.SettingsForSchemes
+TemplateURLFetcherTest.*
+
+# times out on the bot.
+# http://crbug.com/148644
+GAIAInfoUpdateServiceTest.*
+ProfileManagerTest.*
+ProfileInfoCacheTest.*
+
+# Failing on the bot. http://crbug.com/168882
+UserCloudPolicyStoreTest.LoadWithInvalidFile
+UserCloudPolicyStoreTest.LoadWithNoFile
+UserCloudPolicyStoreTest.Store
+UserCloudPolicyStoreTest.StoreThenClear
+UserCloudPolicyStoreTest.StoreThenLoad
+UserCloudPolicyStoreTest.StoreTwoTimes
+UserCloudPolicyStoreTest.StoreValidationError
+
+# Failing on the bot. crbug.com/266972
+OneClickSigninBubbleViewTest.ShowBubble
+
+# http://crbug.com/292960
+SyncBackendHostTest.DownloadControlTypes
+SyncBackendHostTest.SilentlyFailToDownloadControlTypes
+
+# DrM-i#1339: https://code.google.com/p/drmemory/issues/detail?id=1339
+ExtensionServiceTest.InstallTheme
+
+# http://crbug.com/302156
+TabStripModelTest.FastShutdown
+
+# http://crbug.com/336349
+NTPUserDataLoggerTest.TestLogging
+
+# http://crbug.com/340837
+ChromeBlacklistTrialTest.*
+
+# http://crbug.com/349778
+TranslateManagerRenderViewHostTest.*
+
+# http://crbug.com/403073
+SafeBrowsingModuleVerifierWinTest.VerifyModuleExportModified
+SafeBrowsingModuleVerifierWinTest.VerifyModuleModified
+SafeBrowsingModuleVerifierWinTest.VerifyModuleUnmodified
diff --git a/tools/valgrind/gtest_exclude/unit_tests.gtest-memcheck.txt b/tools/valgrind/gtest_exclude/unit_tests.gtest-memcheck.txt
new file mode 100644
index 0000000..fa0595b
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/unit_tests.gtest-memcheck.txt
@@ -0,0 +1,15 @@
+# Timing issues. http://crbug.com/241051
+ExtensionAlarmsTest.*
+
+# SEGV_MAPERR. http://crbug.com/245797
+ClientSideDetectionHostTest.NavigationCancelsShouldClassifyUrl
+
+# Flaky. http://crbug.com/308309
+CloudPrintURLFetcherBasicTest.HandleRawData
+CloudPrintURLFetcherOverloadTest.Protect
+
+# Hangs every once in a while. http://crbug.com/309149
+ExtensionUpdaterTest.TestMultipleManifestDownloading
+
+# Test fails: crbug.com/314216
+ExtensionIconManagerTest.LoadComponentExtensionResource
diff --git a/tools/valgrind/gtest_exclude/unit_tests.gtest.txt b/tools/valgrind/gtest_exclude/unit_tests.gtest.txt
new file mode 100644
index 0000000..08a7601
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/unit_tests.gtest.txt
@@ -0,0 +1,39 @@
+# Hangs sometimes; see http://crbug.com/22146
+VisitedLinkEventsTest.Coalescense
+# Hangs sometimes; see http://crbug.com/22160
+VisitedLinkRelayTest.Basics
+# Hangs (or takes forever?) reliably on bots; see http://crbug.com/23580
+RenderViewTest.ImeComposition
+# Hangs sometimes; see http://crbug.com/52844
+PredictorTest.MassiveConcurrentLookupTest
+# Pure virtual method called: see http://crbug.com/50950
+ConnectionTesterTest.RunAllTests
+
+# The following tests fail under Valgrind because libjingle has hardcoded
+# timeouts for P2P connections.
+# TODO(sergeyu): Remove hardcoded timeouts from libjingle.
+P2PTransportImplTest.Create
+P2PTransportImplTest.ConnectUdp
+P2PTransportImplTest.ConnectTcp
+P2PTransportImplTest.SendDataUdp
+P2PTransportImplTest.SendDataTcp
+
+# Failing on CrOS, see http://crbug.com/79657
+SignedSettingsTest.StorePolicyNoPolicyData
+
+# Flaky and not very interesting under Valgrind http://crbug.com/93027
+ProcessWatcherTest.ImmediateTermination
+
+# Timing out all over the place. Disabling for now. http://crbug.com/149715
+ExtensionWebRequestTest.*
+# Timing out all over the place. Disabling for now. http://crbug.com/149882
+NativeMessagingTest.*
+
+# Failing, see http://crbug.com/408443
+SyncFileSystemServiceTest.SimpleLocalSyncFlow
+
+# Timing out all over the place. Disabling for now. http://crbug.com/164589
+StorageInfoProviderTest.*
+
+# Failing on CrOS, see http://crbug.com/408013
+ProxyConfigServiceImplTest.*
diff --git a/tools/valgrind/gtest_exclude/unit_tests.gtest_linux.txt b/tools/valgrind/gtest_exclude/unit_tests.gtest_linux.txt
new file mode 100644
index 0000000..a43a673
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/unit_tests.gtest_linux.txt
@@ -0,0 +1,34 @@
+# Fails under Valgrind; see http://crbug.com/36770
+URLFetcherBadHTTPSTest.BadHTTPSTest
+# Fails under Valgrind; see http://crbug.com/44552
+RenderViewTest.OnHandleKeyboardEvent
+# http://crbug.com/88221
+ConnectionTesterTest.DeleteWhileInProgress
+# Crash on CrOS, see http://crbug.com/115979
+ClientSideDetectionHostTest.OnPhishingDetectionDoneNotPhishing
+ClientSideDetectionHostTest.OnPhishingDetectionDoneVerdictNotPhishing
+ClientSideDetectionHostTest.OnPhishingDetectionDoneInvalidVerdict
+ClientSideDetectionHostTest.OnPhishingDetectionDoneDisabled
+
+# http://crbug.com/119610
+ProfileSyncServiceSessionTest.WriteFilledSessionToNode
+ProfileSyncServiceSessionTest.ValidTabs
+
+# http://crbug.com/139652
+BackgroundApplicationListModelTest.RandomTest
+
+# http://crbug.com/179427
+ExtensionPrefsDelayedInstallInfo.DelayedInstallInfo
+ExtensionServiceTest.*
+
+# http://crbug.com/180335
+AutocompleteActionPredictorTest.RecommendActionURL
+
+# http://crbug.com/238964
+CpuInfoProviderTest.*
+
+# http://crbug.com/336349
+NTPUserDataLoggerTest.TestLogging
+
+# http://crbug.com/403533
+ExtensionPathUtilTest.BasicPrettifyPathTest
diff --git a/tools/valgrind/gtest_exclude/unit_tests.gtest_mac.txt b/tools/valgrind/gtest_exclude/unit_tests.gtest_mac.txt
new file mode 100644
index 0000000..589b694
--- /dev/null
+++ b/tools/valgrind/gtest_exclude/unit_tests.gtest_mac.txt
@@ -0,0 +1,39 @@
+# Times out too often
+# crbug.com/15817
+IPCSyncChannelTest.*
+# Hangs
+# http://crbug.com/21890
+WebDropTargetTest.URL
+WebDropTargetTest.Data
+# http://crbug.com/69037
+FirefoxImporterTest.Firefox3NSS3Decryptor
+# http://crbug.com/69039
+ProcessInfoSnapshotMacTest.EffectiveVsRealUserIDTest
+
+# The following tests do not pass under memcheck.
+# See http://crbug.com/30393.
+NSMenuItemAdditionsTest.TestMOnDifferentLayouts
+
+# Hangs
+# See http://crbug.com/75733
+BookmarkBarControllerTest.DeleteFromOffTheSideWhileItIsOpen
+BookmarkBarControllerTest.HideWhenShowBookmarkBarTrueButDisabled
+BookmarkBarControllerTest.HideWhenShowBookmarkBarFalse
+
+# Crashes, see http://crbug.com/86656
+MacSandboxTest.FileAccess
+
+# http://crbug.com/87769
+BalloonControllerTest.ShowAndCloseTest
+BalloonControllerTest.SizesTest
+
+# http://crbug.com/89030
+ConnectionTesterTest.DeleteWhileInProgress
+
+# http://crbug.com/93245
+GeolocationWifiDataProviderCommonTest.*
+
+# http://crbug.com/96298
+FileSystemDirURLRequestJobTest.*
+FileSystemURLRequestJobTest.*
+FileSystemOperationWriteTest.*
diff --git a/tools/valgrind/locate_valgrind.sh b/tools/valgrind/locate_valgrind.sh
new file mode 100755
index 0000000..5d0a06b
--- /dev/null
+++ b/tools/valgrind/locate_valgrind.sh
@@ -0,0 +1,77 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Prints the path to the Valgrind binaries to be used for Chromium.
+# Selects the valgrind from third_party/valgrind by default, but allows
+# users to override this default without editing scripts and without
+# specifying a command-line option.
+
+export THISDIR=`dirname $0`
+
+# Users may use their own valgrind by giving its path via the CHROME_VALGRIND
+# environment variable.
+if [ "$CHROME_VALGRIND" = "" ]
+then
+ # Guess which binaries we should use by uname
+ case "$(uname -a)" in
+ *Linux*x86_64*)
+ PLATFORM="linux_x64"
+ ;;
+ *Linux*86*)
+ PLATFORM="linux_x86"
+ ;;
+ *Darwin*9.[678].[01]*i386*)
+ # Didn't test other kernels.
+ PLATFORM="mac"
+ ;;
+ *Darwin*10.[0-9].[0-9]*i386*)
+ PLATFORM="mac_10.6"
+ ;;
+ *Darwin*10.[0-9].[0-9]*x86_64*)
+ PLATFORM="mac_10.6"
+ ;;
+ *Darwin*11.[0-9].[0-9]*x86_64*)
+ PLATFORM="mac_10.7"
+ ;;
+ *)
+ echo "Unknown platform:" >&2
+ uname -a >&2
+ echo "We'll try to search for valgrind binaries installed in /usr/local" >&2
+ PLATFORM=
+ esac
+
+ if [ "$PLATFORM" != "" ]
+ then
+ # The binaries should be in third_party/valgrind
+ # (checked out from deps/third_party/valgrind/binaries).
+ CHROME_VALGRIND="$THISDIR/../../third_party/valgrind/$PLATFORM"
+
+ # TODO(timurrrr): readlink -f is not present on Mac...
+ if [ "$PLATFORM" != "mac" ] && \
+ [ "$PLATFORM" != "mac_10.6" ] && \
+ [ "$PLATFORM" != "mac_10.7" ]
+ then
+ # Get rid of all "../" dirs
+ CHROME_VALGRIND=`readlink -f $CHROME_VALGRIND`
+ fi
+
+ if ! test -x $CHROME_VALGRIND/bin/valgrind
+ then
+ # We couldn't find the binaries in third_party/valgrind
+ CHROME_VALGRIND=""
+ fi
+ fi
+fi
+
+if ! test -x $CHROME_VALGRIND/bin/valgrind
+then
+ echo "Oops, could not find Valgrind binaries in your checkout." >&2
+ echo "Please see" >&2
+ echo " http://dev.chromium.org/developers/how-tos/using-valgrind/get-valgrind" >&2
+ echo "for the instructions on how to download pre-built binaries." >&2
+ exit 1
+fi
+
+echo $CHROME_VALGRIND
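
locate_valgrind.sh only echoes the directory that holds bin/valgrind; callers are expected to build their command line from that path themselves. A minimal sketch of consuming its output from Python (the helper function below is hypothetical, not part of these tools):

    import os
    import subprocess

    def find_valgrind_binary(script='tools/valgrind/locate_valgrind.sh'):
        """Runs locate_valgrind.sh and returns the path to the valgrind
        binary in the directory it prints; raises CalledProcessError if
        the script exits non-zero."""
        out = subprocess.check_output(['bash', script])
        return os.path.join(out.decode().strip(), 'bin', 'valgrind')

    # e.g. subprocess.call([find_valgrind_binary(), '--tool=memcheck',
    #                       './out/Release/net_unittests'])
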
diff --git a/tools/valgrind/memcheck/OWNERS b/tools/valgrind/memcheck/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/tools/valgrind/memcheck/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/tools/valgrind/memcheck/PRESUBMIT.py b/tools/valgrind/memcheck/PRESUBMIT.py
new file mode 100644
index 0000000..b556e63
--- /dev/null
+++ b/tools/valgrind/memcheck/PRESUBMIT.py
@@ -0,0 +1,88 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into gcl.
+"""
+
+import re
+
+def CheckChange(input_api, output_api):
+ """Checks the memcheck suppressions files for bad data."""
+  sup_regex = re.compile(r'suppressions.*\.txt$')
+  # Maps suppression name -> (file path, line number) of its first definition.
+  suppressions = {}
+ errors = []
+ check_for_memcheck = False
+ # skip_next_line has 3 possible values:
+ # - False: don't skip the next line.
+ # - 'skip_suppression_name': the next line is a suppression name, skip.
+ # - 'skip_param': the next line is a system call parameter error, skip.
+ skip_next_line = False
+ for f in filter(lambda x: sup_regex.search(x.LocalPath()),
+ input_api.AffectedFiles()):
+ for line, line_num in zip(f.NewContents(),
+ xrange(1, len(f.NewContents()) + 1)):
+ line = line.lstrip()
+ if line.startswith('#') or not line:
+ continue
+
+ if skip_next_line:
+ if skip_next_line == 'skip_suppression_name':
+ if 'insert_a_suppression_name_here' in line:
+ errors.append('"insert_a_suppression_name_here" is not a valid '
+ 'suppression name')
+          if suppressions.has_key(line):
+            if f.LocalPath() == suppressions[line][0]:
+              errors.append('suppression with name "%s" at %s line %s '
+                            'has already been defined at line %s' %
+                            (line, f.LocalPath(), line_num,
+                             suppressions[line][1]))
+            else:
+              errors.append('suppression with name "%s" at %s line %s '
+                            'has already been defined at %s line %s' %
+                            (line, f.LocalPath(), line_num,
+                             suppressions[line][0], suppressions[line][1]))
+          else:
+            suppressions[line] = (f.LocalPath(), line_num)
+            check_for_memcheck = True
+ skip_next_line = False
+ continue
+ if check_for_memcheck:
+ if not line.startswith('Memcheck:'):
+ errors.append('"%s" should be "Memcheck:..." in %s line %s' %
+ (line, f.LocalPath(), line_num))
+        check_for_memcheck = False
+ if line == '{':
+ skip_next_line = 'skip_suppression_name'
+ continue
+ if line == "Memcheck:Param":
+ skip_next_line = 'skip_param'
+ continue
+
+ if (line.startswith('fun:') or line.startswith('obj:') or
+ line.startswith('Memcheck:') or line == '}' or
+ line == '...'):
+ continue
+ errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
+ line_num))
+ if errors:
+ return [output_api.PresubmitError('\n'.join(errors))]
+ return []
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+def GetPreferredTryMasters(project, change):
+ return {
+ 'tryserver.chromium.linux': {
+ 'linux_valgrind': set(['defaulttests']),
+ },
+ 'tryserver.chromium.mac': {
+ 'mac_valgrind': set(['defaulttests']),
+ }
+ }
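
In effect, CheckChange accepts suppression blocks of the following shape: '{', a free-form (unique, non-placeholder) name, a line starting with 'Memcheck:', then only 'fun:', 'obj:' or '...' frames until the closing '}'. A simplified sketch of that per-block rule, ignoring the Memcheck:Param special case (illustrative only, not the real presubmit logic):

    def is_well_formed_suppression(lines):
        """Very rough mirror of the per-block rules enforced by CheckChange."""
        body = [l.strip() for l in lines
                if l.strip() and not l.strip().startswith('#')]
        if len(body) < 4 or body[0] != '{' or body[-1] != '}':
            return False
        name, kind, frames = body[1], body[2], body[3:-1]
        if 'insert_a_suppression_name_here' in name:
            return False  # placeholder names are rejected
        if not kind.startswith('Memcheck:'):
            return False
        return all(f == '...' or f.startswith(('fun:', 'obj:'))
                   for f in frames)

    example = """
    {
      bug_000000_example_name
      Memcheck:Leak
      fun:malloc
      ...
      fun:SomeFunction
    }
    """
    assert is_well_formed_suppression(example.splitlines())
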
diff --git a/tools/valgrind/memcheck/suppressions.txt b/tools/valgrind/memcheck/suppressions.txt
new file mode 100644
index 0000000..8d83b61
--- /dev/null
+++ b/tools/valgrind/memcheck/suppressions.txt
@@ -0,0 +1,3472 @@
+# There are three kinds of suppressions in this file.
+# 1. third party stuff we have no control over
+#
+# 2. intentional unit test errors, or stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing
+#
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+# These should all be in chromium's bug tracking system (but a few aren't yet).
+# Periodically we should sweep this file and the bug tracker clean by
+# running overnight and removing outdated bugs/suppressions.
+#-----------------------------------------------------------------------
+
+# 1. third party stuff we have no control over
+{
+ Uninitialized value in deflate (Third Party)
+ Memcheck:Uninitialized
+ ...
+ fun:MOZ_Z_deflate
+}
+{
+ #gtk developers don't like cleaning up one-time leaks. See http://mail.gnome.org/archives/gtk-devel-list/2004-April/msg00230.html
+ gtk_init_check leak (Third Party)
+ Memcheck:Leak
+ ...
+ fun:gtk_init_check
+}
+{
+ Fontconfig leak?
+ Memcheck:Leak
+ ...
+ fun:XML_ParseBuffer
+ fun:FcConfigParseAndLoad
+}
+{
+ bug_9245_FcConfigAppFontAddFile_leak
+ Memcheck:Leak
+ ...
+ fun:FcConfigAppFontAddFile
+}
+{
+ pango_font_leak_todo_3
+ Memcheck:Leak
+ ...
+ fun:FcFontRenderPrepare
+ ...
+ fun:pango_itemize_with_base_dir
+}
+{
+ pango_font_leak_todo_4
+ Memcheck:Leak
+ ...
+ fun:FcFontRenderPrepare
+ ...
+ fun:pango_ot_buffer_output
+}
+{
+ pango_font_leak_todo_5
+ Memcheck:Leak
+ ...
+ fun:FcFontRenderPrepare
+ ...
+ fun:pango_context_get_metrics
+}
+{
+ pango_font_leak_todo_6
+ Memcheck:Leak
+ ...
+ fun:FcDefaultSubstitute
+ ...
+ fun:pango_itemize_with_base_dir
+}
+{
+ # Similar to fontconfig_bug_8428 below. Reported in
+ # https://bugs.freedesktop.org/show_bug.cgi?id=8215
+ fontconfig_bug_8215
+ Memcheck:Leak
+ fun:malloc
+ fun:FcPatternObjectInsertElt
+ fun:FcPatternObjectAddWithBinding
+}
+{
+ # Fontconfig leak, seen in shard 16 of 20 of ui_tests
+ # See https://bugs.freedesktop.org/show_bug.cgi?id=8428
+ # and http://www.gnome.org/~johan/gtk.suppression
+ fontconfig_bug_8428
+ Memcheck:Leak
+ ...
+ fun:realloc
+ fun:FcPatternObjectInsertElt
+ fun:FcPatternObjectAddWithBinding
+}
+{
+ bug_18590 (Third Party)
+ Memcheck:Leak
+ ...
+ fun:malloc
+ fun:FcConfigValues
+ fun:FcConfigValues
+ ...
+ fun:FcConfigValues
+ fun:FcConfigValues
+}
+{
+ dlopen leak on error. Chromium issues 268368,273385. See http://sourceware.org/bugzilla/show_bug.cgi?id=12878.
+ Memcheck:Leak
+ fun:calloc
+ fun:_dlerror_run
+ fun:dlopen@@GLIBC_2.2.5
+}
+{
+ bug_58730_libc.so_value8 (Third Party)
+ Memcheck:Uninitialized
+ obj:/lib/libc-2.11.1.so
+}
+# net::SniffXML() clearly tries to read < 8 bytes, but strncasecmp() reads 8.
+{
+ bug_58730_strncasecmp_uninit (Third Party)
+ Memcheck:Uninitialized
+ ...
+ fun:strncasecmp
+ fun:_ZN4base11strncasecmpEPKcS1_m
+ fun:_ZN3netL8SniffXMLEPKcmPbPSs
+}
+{
+ bug_76386a (Third Party)
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNSs4_Rep9_S_createE*RKSaIcE
+ ...
+ fun:_ZNSsC1*KS*
+}
+{
+ bug_76386b (Third Party)
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNSs4_Rep9_S_createE*RKSaIcE
+ fun:_ZNSs4_Rep8_M_cloneERKSaIcE*
+}
+{
+ getpwuid_and_getgrouplist
+ Memcheck:Leak
+ fun:malloc
+ fun:nss_parse_service_list
+ fun:__nss_database_lookup
+ obj:*
+ ...
+ fun:get*
+}
+
+# XRandRInfo object seems to be leaking inside XRRFindDisplay. This happens the
+# first time it is called, no matter who the caller is. We have observed this
+# problem with both XRRSelectInput and XRRQueryExtension.
+{
+ bug_119677
+ Memcheck:Leak
+ fun:malloc
+ fun:XRRFindDisplay
+}
+{
+ Ubuntu_Precise_Fontconfig_Optimized_Code
+ Memcheck:Unaddressable
+ fun:FcConfigFileExists
+}
+{
+ Ubuntu_Precise_Itoa_Optimized_Code
+ Memcheck:Uninitialized
+ fun:_itoa_word
+ fun:vfprintf
+ fun:__vsnprintf_chk
+ fun:__snprintf_chk
+}
+{
+ Ubuntu_Precise_Wcscmp_Optimized_Code_In_Tests
+ Memcheck:Uninitialized
+ fun:wcscmp
+ fun:_ZN7testing8internal6String17WideCStringEqualsEPKwS3_
+}
+{
+ mesa_glsl_compile_shader
+ Memcheck:Leak
+ ...
+ fun:_mesa_glsl_compile_shader
+ fun:compile_shader
+ fun:_mesa_CompileShaderARB
+ fun:shared_dispatch_stub_529
+}
+
+#-----------------------------------------------------------------------
+# 2. intentional unit test errors, or stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing
+
+# See tools/valgrind/memcheck_analyze.py before modifying sanity tests.
+{
+ Memcheck sanity test 01 (memory leak).
+ Memcheck:Leak
+ fun:_Zna*
+ fun:_ZN4base31ToolsSanityTest_MemoryLeak_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 02 (malloc/read left).
+ Memcheck:Unaddressable
+ fun:*ReadValueOutOfArrayBoundsLeft*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 03 (malloc/read right).
+ Memcheck:Unaddressable
+ fun:*ReadValueOutOfArrayBoundsRight*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 04 (malloc/write left).
+ Memcheck:Unaddressable
+ fun:*WriteValueOutOfArrayBoundsLeft*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 05 (malloc/write right).
+ Memcheck:Unaddressable
+ fun:*WriteValueOutOfArrayBoundsRight*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 06 (new/read left).
+ Memcheck:Unaddressable
+ fun:*ReadValueOutOfArrayBoundsLeft*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 07 (new/read right).
+ Memcheck:Unaddressable
+ fun:*ReadValueOutOfArrayBoundsRight*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 08 (new/write left).
+ Memcheck:Unaddressable
+ fun:*WriteValueOutOfArrayBoundsLeft*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 09 (new/write right).
+ Memcheck:Unaddressable
+ fun:*WriteValueOutOfArrayBoundsRight*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 10 (write after free).
+ Memcheck:Unaddressable
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 11 (write after delete).
+ Memcheck:Unaddressable
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 12 (array deleted without []).
+ Memcheck:Free
+ ...
+ fun:_ZN4base46ToolsSanityTest_ArrayDeletedWithoutBraces_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 13 (single element deleted with []).
+ Memcheck:Free
+ ...
+ fun:_ZN4base51ToolsSanityTest_SingleElementDeletedWithBraces_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 14 (malloc/read uninit).
+ Memcheck:Uninitialized
+ fun:*ReadUninitializedValue*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 15 (new/read uninit).
+ Memcheck:Uninitialized
+ fun:*ReadUninitializedValue*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ bug_86301 This test explicitly verifies PostTaskAndReply leaks the task if the originating MessageLoop has been deleted.
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10TaskRunner16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_
+ fun:_ZN4base12_GLOBAL__N_169MessageLoopProxyTest_PostTaskAndReply_DeadReplyLoopDoesNotDelete_Test8TestBodyEv
+}
+{
+ # Non-joinable thread doesn't clean up all state on program exit
+ # very common in ui tests
+ bug_16096 (WontFix)
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNSs4_Rep9_S_createE*RKSaIcE
+ fun:_ZNSs4_Rep8_M_cloneERKSaIcE*
+ fun:_ZNSs7reserveE*
+ fun:_ZNSs6appendEPKc*
+ fun:*StringAppendV*
+ ...
+ fun:_ZN4base12StringPrintfEPKcz
+}
+{
+ # According to dglazkov, these are one-time leaks and intentional.
+ # They may go away if the change to move these off the heap lands.
+ bug_17996 (Intentional)
+ Memcheck:Leak
+ ...
+ fun:_ZN5blink8SVGNames4initEv
+}
+{
+ # This is an on demand initialization which is done and then intentionally
+ # kept around (not freed) while the process is running.
+ intentional_blink_XMLNames_init_leak
+ Memcheck:Leak
+ ...
+ fun:_ZN5blink8XMLNames4initEv
+}
+{
+ # Intentionally leaking NSS to prevent shutdown crashes
+ bug_61585a (Intentional)
+ Memcheck:Leak
+ fun:calloc
+ ...
+ fun:error_get_my_stack
+}
+{
+ FileStream::Context can leak through WorkerPool by design
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3net10FileStreamC1EPNS_6NetLogE
+}
+{
+ # Histograms are used on un-joined threads, and can't be deleted atexit.
+ Histograms via FactoryGet including Linear Custom Boolean and Basic
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN4base*Histogram10FactoryGet*
+}
+{
+ Intentional leak for SampleMap (stores SparseHistogram counts).
+ Memcheck:Leak
+ ...
+ fun:_ZN4base9SampleMap10AccumulateEii
+ fun:_ZN4base15SparseHistogram3AddEi
+}
+{
+ bug_73299 (Intentional)
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content17WorkerProcessHost20CreateMessageFiltersEi
+ fun:_ZN7content17WorkerProcessHost4InitE*
+ fun:_ZN7content17WorkerServiceImpl24CreateWorkerFromInstanceENS_17WorkerProcessHost14WorkerInstanceE
+ fun:_ZN7content17WorkerServiceImpl12CreateWorkerE*
+ fun:_ZN7content19WorkerMessageFilter14OnCreateWorkerERK31ViewHostMsg_CreateWorker_ParamsPi
+}
+{
+ bug_83345 (Needs_Annotation)
+ Memcheck:Leak
+ ...
+ fun:_ZN4base*23LeakyLazyInstanceTraits*NewEPv
+ fun:_ZN4base12LazyInstance*LeakyLazyInstanceTraits*PointerEv
+}
+{
+ bug_87500_a (Intentional)
+ Memcheck:Leak
+ ...
+ fun:_ZN10disk_cache9BackendIO23ExecuteBackendOperationEv
+ fun:_ZN10disk_cache9BackendIO16ExecuteOperationEv
+}
+{
+ bug_79322 (Intentional)
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN4base*StatisticsRecorderTest_*_Test8TestBodyEv
+}
+{
+ # According to dglazkov, UA style sheets are intentionally leaked.
+ # As such, treat any leaks originating from parseUASheet as intentional.
+ bug_121729 (Intentional)
+ Memcheck:Leak
+ ...
+ fun:_ZN5blinkL12parseUASheetEPKcj
+}
+{
+ bug_121729_b (Intentional)
+ Memcheck:Leak
+ ...
+ fun:_ZN5blinkL12parseUASheetERKN3WTF6StringE
+}
+{
+ intentional_see_bug_156466
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3ash5ShellC1EPNS_13ShellDelegateE
+ fun:_ZN3ash5Shell14CreateInstanceEPNS_13ShellDelegateE
+}
+
+# http://crbug.com/269278 causes really widespread, flaky leaks in
+# value objects that own some memory. These suppressions will cover
+# all such objects, even though it's possible to get real leaks that
+# look the same way (e.g. by allocating such an object in an arena).
+{
+ bug_269278a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base4Bind*Callback*BindState*
+}
+{
+ bug_269278b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocator*allocate*
+ fun:_ZNSt12_Vector_base*_M_allocate*
+}
+
+# Externally allocated objects referenced by V8 objects can currently
+# be diagnosed as (false) leaks, since memcheck does not know how to
+# handle V8 leaks. See http://crbug.com/328552 for a more detailed discussion.
+{
+ bug_328552
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10StringImpl19createUninitializedEjRPh
+}
+{
+ bug_383956
+ Memcheck:Leak
+ fun:calloc
+ fun:_ZN18hb_object_header_t6createEj
+ fun:_Z*hb_object_createI9hb_face_tEPT_v
+ fun:hb_face_create_for_tables
+ fun:_ZN3gfx12_GLOBAL__N_118CreateHarfBuzzFaceEP10SkTypeface
+ fun:_ZN3gfx12_GLOBAL__N_118CreateHarfBuzzFontEP10SkTypefacei
+ fun:_ZN3gfx18RenderTextHarfBuzz8ShapeRunEPNS_8internal15TextRunHarfBuzzE
+ fun:_ZN3gfx18RenderTextHarfBuzz12EnsureLayoutEv
+}
+
+{
+ bug_391510
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base21CancelableTaskTracker16NewTrackedTaskId*
+ fun:_ZN14HistoryService14ScheduleDBTask*
+ fun:_ZN7history19URLIndexPrivateData26ScheduleUpdateRecentVisits*
+}
+{
+ bug_399852_a
+ Memcheck:Uninitialized
+ fun:_ZN5blink14DateComponents9parseTimeERKN3WTF6StringEjRj
+ fun:_ZNK5blink13TimeInputType29parseToDateComponentsInternalERKN3WTF6StringEPNS_14DateComponentsE
+ fun:_ZNK5blink24BaseDateAndTimeInputType21parseToDateComponentsERKN3WTF6StringEPNS_14DateComponentsE
+}
+{
+ bug_399852_b
+ Memcheck:Uninitialized
+ fun:_ZN5blink12_GLOBAL__N_117parseJSONInternalIhEEN3WTF10PassRefPtrINS_9JSONValueEEEPKT_j
+ fun:_ZN5blink9parseJSONERKN3WTF6StringE
+ fun:_ZN5blink*InspectorBackendDispatcher*
+ ...
+ fun:_ZN5blink*WebDevToolsAgent*
+ fun:_ZN7content*DevToolsAgent*
+}
+{
+ bug_399852_c
+ Memcheck:Uninitialized
+ fun:_ZN5blinkL21extractRangeComponentEPN3WTF6StringERKNS0_6RefPtrINS_10JSONObjectEEERKS1_Rj
+ fun:_ZN5blinkL22jsonRangeToSourceRangeEPN3WTF6StringEPNS_23InspectorStyleSheetBase*
+ fun:_ZN5blink17InspectorCSSAgent*
+ fun:_ZThn40_N5blink17InspectorCSSAgent*
+ fun:_ZN5blink30InspectorBackendDispatcherImpl*
+ fun:_ZN5blink30InspectorBackendDispatcherImpl8dispatchERKN3WTF6StringE
+ fun:_ZN5blink19InspectorController27dispatchMessageFromFrontendERKN3WTF6StringE
+ fun:_ZN5blink20WebDevToolsAgentImpl26dispatchOnInspectorBackendERKNS_9WebStringE
+ fun:_ZN7content13DevToolsAgent28OnDispatchOnInspectorBackendERKSs
+}
+{
+ bug_399852_d
+ Memcheck:Uninitialized
+ fun:_ZN5blink14DateComponents10parseMonthERKN3WTF6StringEjRj
+ ...
+ fun:_ZNK5blink24BaseDateAndTimeInputType21parseToDateComponentsERKN3WTF6StringEPNS_14DateComponentsE
+ fun:_ZNK5blink24BaseDateAndTimeInputType15typeMismatchForERKN3WTF6StringE
+ fun:_ZNK5blink24BaseDateAndTimeInputType13sanitizeValueERKN3WTF6StringE
+}
+{
+ bug_418234
+ Memcheck:Uninitialized
+ fun:_ZN10extensions19ExtensionManagement7RefreshEv
+ fun:_ZN10extensions19ExtensionManagement22OnExtensionPrefChangedEv
+}
+
+#-----------------------------------------------------------------------
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+# These should all be in chromium's bug tracking system (but a few aren't yet).
+
+{
+ # webkit leak? See http://crbug.com/9503
+ bug_9503
+ Memcheck:Leak
+ ...
+ fun:_ZN19TestWebViewDelegate24UpdateSelectionClipboardEb
+}
+{
+ # very common in ui tests
+ bug_16091
+ Memcheck:Leak
+ ...
+ fun:_ZN4base11MessageLoop22AddDestructionObserverEPNS0_19DestructionObserverE
+ ...
+ fun:_ZN3IPC11SyncChannel11SyncContext15OnChannelOpenedEv
+}
+{
+ # very common in ui tests
+ bug_16092
+ Memcheck:Leak
+ fun:*
+ fun:_ZN4base11MessageLoopC1ENS0_4TypeE
+ fun:_ZN4base6Thread10ThreadMainEv
+}
+{
+ # very common in ui tests
+ bug_16092b
+ Memcheck:Leak
+ ...
+ fun:_ZNSt11_Deque_baseIN4base11PendingTaskESaIS1_EE17_M_initialize_mapE*
+ ...
+ fun:_ZN4base11MessageLoopC1ENS0_4TypeE
+}
+{
+ # very common in ui tests
+ bug_16092c
+ Memcheck:Leak
+ ...
+ fun:_ZNSt14priority_queueIN11MessageLoop11PendingTaskESt6vectorIS1_SaIS1_EESt4lessIS1_EEC1ERKS6_RKS4_
+ fun:_ZN4base11MessageLoopC1ENS0_4TypeE
+ fun:_ZN4base6Thread10ThreadMainEv
+}
+{
+ # also bug 17979. It's a nest of leaks.
+ bug_17385
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3IPC12ChannelProxy7Context13CreateChannel*Channel4ModeE
+ fun:_ZN3IPC12ChannelProxy4Init*
+ ...
+ fun:_ZN3IPC11SyncChannel*Channel4Mode*Listener*
+}
+{
+ bug_17540
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base19MessagePumpLibevent19WatchFileDescriptor*FileDescriptorWatcherEPNS0_7WatcherE
+ fun:_ZN4base16MessageLoopForIO19WatchFileDescriptor*MessagePumpLibevent21FileDescriptorWatcherEPNS2_7WatcherE
+ ...
+ fun:_ZN3IPC*Channel*ConnectEv
+ fun:_ZN3IPC12ChannelProxy7Context15OnChannelOpenedEv
+}
+{
+ # slight variant of the above
+ bug_19371a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN4base13WaitableEvent7EnqueueEPNS0_6WaiterE
+ fun:_ZN4base13WaitableEvent9TimedWaitERKNS_9TimeDeltaE
+ fun:_ZN4base18MessagePumpDefault3RunEPNS_11MessagePump8DelegateE
+}
+{
+ bug_19775_a
+ Memcheck:Leak
+ ...
+ fun:malloc
+ fun:sqlite3MemMalloc
+ fun:mallocWithAlarm
+ fun:sqlite3Malloc
+ ...
+ fun:sqlite3VdbeExec
+ fun:sqlite3Step
+ fun:sqlite3_step
+ fun:sqlite3_exec
+ fun:_ZN3sql10Connection7Execute*
+ ...
+ fun:_ZN7history*Database*Create*
+}
+{
+ bug_19775_c
+ Memcheck:Leak
+ ...
+ fun:openDatabase
+ fun:sqlite3_open
+ fun:_ZN3sql10Connection12OpenInternalERKSs
+}
+{
+ bug_19775_g
+ Memcheck:Leak
+ fun:malloc
+ fun:sqlite3MemMalloc
+ fun:mallocWithAlarm
+ fun:sqlite3Malloc
+ fun:sqlite3ParserAlloc
+ fun:sqlite3RunParser
+ fun:sqlite3Prepare
+ fun:sqlite3LockAndPrepare
+ fun:sqlite3_prepare*
+}
+{
+ bug_19775_h
+ Memcheck:Leak
+ ...
+ fun:malloc
+ fun:sqlite3MemMalloc
+ fun:mallocWithAlarm
+ fun:sqlite3Malloc
+ ...
+ fun:yy_reduce
+}
+# The following suppressions are related to the workers code.
+{
+ bug_27837
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN19WebSharedWorkerStub9OnConnectEii
+}
+{
+ bug_32085
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorIN7content21NotificationRegistrar6RecordEE8allocate*
+ fun:_ZNSt12_Vector_baseIN7content21NotificationRegistrar6RecordESaIS*
+ fun:_ZNSt6vectorIN7content21NotificationRegistrar6RecordESaIS2_EE13_M_insert_auxEN9__gnu_cxx17__normal_iteratorIPS2_S*
+ fun:_ZNSt6vectorIN7content21NotificationRegistrar6RecordESaIS*
+ fun:_ZN7content21NotificationRegistrar3Add*
+}
+{
+ bug_32273_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC12ChannelProxy4SendEPNS_7MessageE
+ fun:_ZN3IPC11SyncChannel15SendWithTimeoutEPNS_7MessageEi
+ fun:_ZN3IPC11SyncChannel4SendEPNS_7MessageE
+ fun:_ZN11ChildThread4SendEPN3IPC7MessageE
+ fun:_ZN12RenderThread4SendEPN3IPC7MessageE
+ fun:_ZN12RenderWidget4SendEPN3IPC7MessageE
+ fun:_ZN12RenderWidget16DoDeferredUpdateEv
+ fun:_ZN12RenderWidget20CallDoDeferredUpdateEv
+}
+{
+ bug_32273_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN24BrowserRenderProcessHost4SendEPN3IPC7MessageE
+ fun:_ZN16RenderWidgetHost4SendEPN3IPC7MessageE
+}
+{
+ bug_32624_b
+ Memcheck:Leak
+ fun:malloc
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ fun:secmod_ModuleInit
+}
+{
+ bug_32624_c
+ Memcheck:Leak
+ ...
+ fun:malloc
+ ...
+ fun:PORT_Alloc_Util
+ ...
+ fun:PK11_InitPin
+}
+{
+ bug_32624_f
+ Memcheck:Leak
+ ...
+ fun:CERT_PKIXVerifyCert
+ fun:_ZN3net12_GLOBAL__N_114PKIXVerifyCertE*
+}
+{
+ bug_32624_g
+ Memcheck:Leak
+ ...
+ fun:CERT_VerifySignedData
+ fun:cert_VerifyCertChain
+ fun:CERT_VerifyCertChain
+ fun:CERT_VerifyCert
+}
+{
+ bug_42842
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN19TestWebViewDelegate12createWorkerEPN5blink8WebFrameEPNS0_15WebWorkerClientE
+ fun:_ZN5blink19WebWorkerClientImpl24createWorkerContextProxyEPN7blink6WorkerE
+ fun:_ZN5blink18WorkerContextProxy6createEPNS_6WorkerE
+ fun:_ZN5blink6WorkerC1EPNS_22ScriptExecutionContextE
+ fun:_ZN5blink6Worker6createERKN3WTF6StringEPNS_22ScriptExecutionContextERi
+ fun:_ZN5blink8V8Worker19constructorCallbackERKN2v89ArgumentsE
+}
+{
+ bug_42942_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3sql10Connection18GetCachedStatementERKNS_11StatementIDEPKc
+ fun:_ZN3sql9MetaTable19PrepareGetStatementEPNS_9StatementEPKc
+ fun:_ZN3sql9MetaTable8GetValueEPKcPi
+ fun:_ZN3sql9MetaTable26GetCompatibleVersionNumberEv
+ ...
+ fun:_ZN3net13CookieMonster9InitStoreEv
+}
+{
+ bug_42942_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3sql10Connection18GetCachedStatementERKNS_11StatementIDEPKc
+ fun:_ZN3sql9MetaTable19PrepareSetStatementEPNS_9StatementEPKc
+ fun:_ZN3sql9MetaTable8SetValueEPKci
+ fun:_ZN3sql9MetaTable16SetVersionNumberEi
+ ...
+ fun:_ZN3net13CookieMonster9InitStoreEv
+}
+{
+ bug_64887_a
+ Memcheck:Uninitialized
+ ...
+ fun:*vfprintf
+ ...
+ fun:_ZN7testing*PrintByteSegmentInObjectTo*
+ ...
+ fun:_ZN7testing*PrintBytesInObjectTo*
+ fun:_ZN7testing9internal220PrintBytesInObjectToEPKh*
+ fun:_ZN7testing9internal220TypeWithoutFormatter*
+ fun:_ZN7testing9internal2lsIcSt11char_traitsIcE*
+}
+{
+ bug_64887_b
+ Memcheck:Uninitialized
+ ...
+ fun:_ZNSolsEx
+ fun:_ZN7testing9internal220TypeWithoutFormatterIN5media7PreloadELNS0_8TypeKindE1EE10PrintValueERKS3_PSo
+ fun:_ZN7testing9internal2lsIcSt11char_traitsIcEN5media7PreloadEEERSt13basic_ostreamIT_T0_ESA_RKT1_
+ fun:_ZN16testing_internal26DefaultPrintNonContainerToIN5media7PreloadEEEvRKT_PSo
+ fun:_ZN7testing8internal14DefaultPrintToIN5media7PreloadEEEvcNS0_13bool_constantILb0EEERKT_PSo
+ fun:_ZN7testing8internal7PrintToIN5media7PreloadEEEvRKT_PSo
+ fun:_ZN7testing8internal16UniversalPrinterIN5media7PreloadEE5PrintERKS3_PSo
+ fun:_ZN7testing8internal18TuplePrefixPrinter*
+ fun:_ZN7testing8internal12PrintTupleToINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZN7testing8internal7PrintToIN5media7PreloadEEEvRKNSt3tr15tupleIT*
+ fun:_ZN7testing8internal16UniversalPrinterINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZN7testing8internal14UniversalPrintINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZNK7testing8internal18FunctionMockerBaseIFvN5media7PreloadEEE32UntypedDescribeUninterestingCallEPKvPSo
+ fun:_ZN7testing8internal25UntypedFunctionMockerBase17UntypedInvokeWithEPKv
+ fun:_ZN7testing8internal18FunctionMockerBaseIFvN5media7PreloadEEE10InvokeWithERKNSt3tr15tupleIS3*
+ fun:_ZN7testing8internal14FunctionMockerIFvN5media7PreloadEEE6InvokeES3_
+ fun:_ZN5media11MockDemuxer10SetPreloadENS_7PreloadE
+}
+{
+ bug_64887_c
+ Memcheck:Uninitialized
+ ...
+ fun:_ZNSolsEx
+ fun:_ZN7testing9internal220TypeWithoutFormatterIN5media7PreloadELNS0_8TypeKindE1EE10PrintValueERKS3_PSo
+ fun:_ZN7testing9internal2lsIcSt11char_traitsIcEN5media7PreloadEEERSt13basic_ostreamIT_T0_ESA_RKT1_
+ fun:_ZN16testing_internal26DefaultPrintNonContainerToIN5media7PreloadEEEvRKT_PSo
+ fun:_ZN7testing8internal14DefaultPrintToIN5media7PreloadEEEvcNS0_13bool_constantILb0EEERKT_PSo
+ fun:_ZN7testing8internal7PrintToIN5media7PreloadEEEvRKT_PSo
+ fun:_ZN7testing8internal16UniversalPrinterIN5media7PreloadEE5PrintERKS3_PSo
+ fun:_ZN7testing8internal18TuplePrefixPrinter*
+ fun:_ZN7testing8internal12PrintTupleToINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZN7testing8internal7PrintToIN5media7PreloadEEEvRKNSt3tr15tupleIT*
+ fun:_ZN7testing8internal16UniversalPrinterINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZN7testing8internal14UniversalPrintINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZNK7testing8internal18FunctionMockerBaseIFvN5media7PreloadEEE32UntypedDescribeUninterestingCallEPKvPSo
+ fun:_ZN7testing8internal25UntypedFunctionMockerBase17UntypedInvokeWithEPKv
+ fun:_ZN7testing8internal18FunctionMockerBaseIFvN5media7PreloadEEE10InvokeWithERKNSt3tr15tupleIS3*
+ fun:_ZN7testing8internal14FunctionMockerIFvN5media7PreloadEEE6InvokeES3_
+ fun:_ZN5media11MockDemuxer10SetPreloadENS_7PreloadE
+}
+{
+ bug_65940_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3IPC12ChannelProxy7Context13CreateChannelERKNS_13ChannelHandleERKNS_7Channel4ModeE
+ fun:_ZN3IPC12ChannelProxy4InitERKNS_13ChannelHandleENS_7Channel4ModeEP11MessageLoopb
+ fun:_ZN3IPC12ChannelProxyC2ERKNS_13ChannelHandleENS_7Channel4ModeEP11MessageLoopPNS0_7ContextEb
+ fun:_ZN3IPC11SyncChannelC1ERKNS_13ChannelHandleENS_7Channel4ModeEPNS4_8ListenerEP11MessageLoopbPN4base13WaitableEventE
+}
+{
+ bug_65940_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3IPC11SyncChannelC1ERKNS_13ChannelHandleENS_7Channel4ModeEPNS_8ListenerEPN4base22SingleThreadTaskRunnerEbPNS8_13WaitableEventE
+ fun:_ZN7content11ChildThread4InitEv
+ fun:_ZN7content11ChildThreadC2ERKSs
+}
+{
+ bug_65940_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorI13scoped_refptrIN3IPC12ChannelProxy13MessageFilterEEE8allocateEmPKv
+ fun:_ZNSt12_Vector_baseI13scoped_refptrIN3IPC12ChannelProxy13MessageFilterEESaIS4_EE11_M_allocateEm
+ fun:_ZNSt6vectorI13scoped_refptrIN3IPC12ChannelProxy13MessageFilterEESaIS4_EE13_M_insert_auxEN9__gnu_cxx17__normal_iteratorIPS4_S6_EERKS4_
+ fun:_ZNSt6vectorI13scoped_refptrIN3IPC12ChannelProxy13MessageFilterEESaIS4_EE9push_backERKS4_
+ fun:_ZN3IPC12ChannelProxy7Context11OnAddFilterEv
+}
+{
+ bug_65940_d
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content11ChildThread4InitEv
+ fun:_ZN7content11ChildThreadC*
+ ...
+ fun:_ZN7content21WebRTCAudioDeviceTest5SetUpEv
+}
+{
+ bug_65940_e
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content16RenderThreadImpl4InitEv
+ fun:_ZN7content16RenderThreadImplC*
+ ...
+ fun:_ZN7content21WebRTCAudioDeviceTest5SetUpEv
+}
+{
+ bug_66853_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN11ProfileImpl14GetHostZoomMapEv
+ ...
+ fun:_ZNK17ProfileImplIOData6Handle27GetMainRequestContextGetterEv
+ fun:_ZN11ProfileImpl17GetRequestContextEv
+ fun:_ZN19SafeBrowsingService5StartEv
+ fun:_ZN19SafeBrowsingService10InitializeEv
+ fun:_ZN22ResourceDispatcherHost10InitializeEv
+ fun:_ZN18BrowserProcessImpl28CreateResourceDispatcherHostEv
+ fun:_ZN18BrowserProcessImpl24resource_dispatcher_hostEv
+ fun:_ZN16ExtensionService4InitEv
+ fun:_ZN11ProfileImpl14InitExtensionsE*
+ fun:_ZN14ProfileManager10AddProfileEP7Profileb
+}
+{
+ bug_67142
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN16ChildProcessHost13CreateChannelEv
+ fun:_ZN14GpuProcessHost4InitEv
+}
+{
+ bug_67261
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3sql10Connection18GetUniqueStatementEPKc
+ fun:_ZN3sql10Connection18GetCachedStatementERKNS_11StatementIDEPKc
+ fun:_ZN8appcache16AppCacheDatabase22PrepareCachedStatementERKN3sql11StatementIDEPKcPNS1_9StatementE
+}
+{
+ bug_67553
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZNSt3mapISs13scoped_refptrIK9ExtensionESt4lessISsESaISt4pairIKSsS3_EEEixERS7_
+ fun:_ZN16ExtensionInfoMap12AddExtensionEPK9Extension
+}
+{
+ bug_68553
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3net25DiskCacheBasedSSLHostInfoC1ERKSsRKNS_9SSLConfigEPNS_12CertVerifierEPNS_9HttpCacheE
+ fun:_ZN3net9HttpCache25SSLHostInfoFactoryAdaptor10GetForHostERKSsRKNS_9SSLConfigE
+ fun:_ZN3net13SSLConnectJob12DoTCPConnectEv
+ fun:_ZN3net13SSLConnectJob6DoLoopEi
+ fun:_ZN3net13SSLConnectJob15ConnectInternalEv
+ fun:_ZN3net10ConnectJob7ConnectEv
+ fun:_ZN3net8internal26ClientSocketPoolBaseHelper21RequestSocketInternalERKSsPKNS1_7RequestE
+ fun:_ZN3net8internal26ClientSocketPoolBaseHelper13RequestSocketERKSsPKNS1_7RequestE
+ fun:_ZN3net20ClientSocketPoolBaseINS_15SSLSocketParamsEE13RequestSocketERKSsRK13scoped_refptrIS1_ENS_15RequestPriorityEPNS_18ClientSocketHandleEP14CallbackRunnerI6Tuple1IiEERKNS_11BoundNetLogE
+ fun:_ZN3net19SSLClientSocketPool13RequestSocketERKSsPKvNS_15RequestPriorityEPNS_18ClientSocketHandleEP14CallbackRunnerI6Tuple1IiEERKNS_11BoundNetLogE
+ fun:_ZN3net18ClientSocketHandle4InitINS_15SSLSocketParamsENS_19SSLClientSocketPoolEEEiRKSsRK13scoped_refptrIT_ENS_15RequestPriorityEP14CallbackRunnerI6Tuple1IiEEPT0_RKNS_11BoundNetLogE
+}
+{
+ Bug_69934_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN*NPObjectProxy10NPAllocateEP4_NPPP7NPClass
+ fun:_NPN_CreateObject
+ fun:_ZN5blink11WebBindings12createObjectEP4_NPPP7NPClass
+}
+{
+ Bug_69934_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC11SyncMessage13GenerateReplyEPKNS_7MessageE
+ fun:_ZN3IPC17SyncMessageSchema*
+}
+{
+ bug_71728
+ Memcheck:Leak
+ fun:_Znw*
+ fun:*DownloadFileTest5SetUpEv
+}
+{
+ bug_72544
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEj
+ fun:_ZN3WTF10RefCountedIN7blink14StyleSheetListEEnwEj
+ fun:_ZN5blink14StyleSheetList6createEPNS_8DocumentE
+ fun:_ZN5blink8DocumentC2EPNS_5FrameERKNS_4KURLEbbS5_
+ fun:_ZN5blink12HTMLDocumentC1EPNS_5FrameERKNS_4KURLES5_
+ fun:_ZN5blink12HTMLDocument6createEPNS_5FrameERKNS_4KURLES5_
+ fun:_ZN5blink17DOMImplementation14createDocumentERKN3WTF6StringEPNS_5FrameERKNS_4KURLEb
+ fun:_ZN5blink14DocumentWriter14createDocumentERKNS_4KURLE
+ fun:_ZN5blink14DocumentWriter5beginERKNS_4KURLEbPNS_14SecurityOriginE
+ fun:_ZN5blink11FrameLoader4initEv
+ fun:_ZN5blink5Frame4initEv
+ fun:_ZN5blink12WebFrameImpl21initializeAsMainFrameEPNS_11WebViewImplE
+ fun:_ZN5blink11WebViewImpl19initializeMainFrameEPNS_14WebFrameClientE
+ fun:_ZN10RenderViewC1EP16RenderThreadBaseiiRK19RendererPreferencesRK14WebPreferencesPN4base14RefCountedDataIiEEixRKSbItNS8_20string16_char_traitsESaItEE
+ fun:_ZN10RenderView6CreateEP16RenderThreadBaseiiRK19RendererPreferencesRK14WebPreferencesPN4base14RefCountedDataIiEEixRKSbItNS8_20string16_char_traitsESaItEE
+ fun:_ZN12RenderThread15OnCreateNewViewERK18ViewMsg_New_Params
+}
+{
+ bug_72698_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN13ProfileIOData20InitializeOnUIThreadEP7Profile
+}
+{
+ bug_73415
+ Memcheck:Unaddressable
+ fun:_ZN23AccessibilityController36shouldDumpAccessibilityNotificationsEv
+ fun:_ZN11WebViewHost29postAccessibilityNotificationERKN5blink22WebAccessibilityObjectENS0_28WebAccessibilityNotificationE
+ fun:_ZN5blink16ChromeClientImpl29postAccessibilityNotificationEPN7blink19AccessibilityObjectENS1_13AXObjectCache14AXNotificationE
+ fun:_ZN5blink13AXObjectCache24postPlatformNotificationEPNS_19AccessibilityObjectENS0_14AXNotificationE
+}
+{
+ bug_73675
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN20LayoutTestController13waitUntilDoneERKN3WTF6VectorI10CppVariantLj0EEEPS2_
+ fun:_ZN13CppBoundClass14MemberCallbackI20LayoutTestControllerE3runERKN3WTF6VectorI10CppVariantLj0EEEPS5_
+ fun:_ZN13CppBoundClass6invokeEPvPK10_NPVariantjPS1_
+ fun:_ZN11CppNPObject6invokeEP8NPObjectPvPK10_NPVariantjPS3_
+ fun:_ZN5blink18npObjectInvokeImplERKN2v89ArgumentsENS_18InvokeFunctionTypeE
+ fun:_ZN5blink21npObjectMethodHandlerERKN2v89ArgumentsE
+ fun:_ZN2v88internal19HandleApiCallHelperILb0EEEPNS0_11MaybeObjectENS0_47_GLOBAL__N_v8_src_builtins.cc_*BuiltinArgumentsILNS0_21BuiltinExtraArgumentsE1EEE
+ obj:*
+}
+{
+ bug_75019
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN14GpuDataManagerC1Ev
+ fun:_ZN22DefaultSingletonTraitsI14GpuDataManagerE3NewEv
+ fun:_ZN9SingletonI14GpuDataManager22DefaultSingletonTraitsIS0_ES0_E3getEv
+ fun:_ZN14GpuDataManager11GetInstanceEv
+ fun:_Z11BrowserMainRK18MainFunctionParams
+ fun:_ZN20InProcessBrowserTest5SetUpEv
+}
+{
+ bug_76197a
+ Memcheck:Unaddressable
+ fun:sqlite3DbFree
+ fun:releaseMemArray
+ fun:sqlite3VdbeDelete
+ fun:sqlite3VdbeFinalize
+ fun:sqlite3_finalize
+ fun:_ZN3sql10Connection12StatementRef5CloseEv
+ fun:_ZN3sql10Connection12StatementRefD2Ev
+ fun:_ZN3sql10Connection12StatementRefD1Ev
+ fun:_ZNK4base10RefCountedIN3sql10Connection12StatementRefEE7ReleaseEv
+ fun:_ZN13scoped_refptrIN3sql10Connection12StatementRefEED2Ev
+ fun:_ZN13scoped_refptrIN3sql10Connection12StatementRefEED1Ev
+ fun:_ZNSt4pairIKN3sql11StatementIDE13scoped_refptrINS0_10Connection12StatementRefEEED2Ev
+ fun:_ZNSt4pairIKN3sql11StatementIDE13scoped_refptrINS0_10Connection12StatementRefEEED1Ev
+ fun:_ZN9__gnu_cxx13new_allocatorISt4pairIKN3sql11StatementIDE13scoped_refptrINS2_10Connection12StatementRefEEEE7destroyEPS9_
+ fun:_ZNSt8_Rb_treeIN3sql11StatementIDESt4pairIKS1_13scoped_refptrINS0_10Connection12StatementRefEEESt10_Select1stIS8_ESt4lessIS1_ESaIS8_EE12destroy_nodeEPSt13_Rb_tree_nodeIS8_E
+ fun:_ZNSt8_Rb_treeIN3sql11StatementIDESt4pairIKS1_13scoped_refptrINS0_10Connection12StatementRefEEESt10_Select1stIS8_ESt4lessIS1_ESaIS8_EE8_M_eraseEPSt13_Rb_tree_nodeIS8_E
+ fun:_ZNSt8_Rb_treeIN3sql11StatementIDESt4pairIKS1_13scoped_refptrINS0_10Connection12StatementRefEEESt10_Select1stIS8_ESt4lessIS1_ESaIS8_EE5clearEv
+ fun:_ZNSt3mapIN3sql11StatementIDE13scoped_refptrINS0_10Connection12StatementRefEESt4lessIS1_ESaISt4pairIKS1_S5_EEE5clearEv
+ fun:_ZN3sql10Connection5CloseEv
+ fun:_ZN3sql10ConnectionD2Ev
+ fun:_ZN3sql10ConnectionD1Ev
+ fun:_ZN7history16InMemoryDatabaseD0Ev
+}
+{
+ bug_76197b
+ Memcheck:Unaddressable
+ ...
+ fun:sqlite3_step
+ fun:sqlite3_exec
+ fun:_ZN3sql10Connection7ExecuteEPKc
+ fun:_ZN7history11URLDatabase31CreateKeywordSearchTermsIndicesEv
+ fun:_ZN7history16InMemoryDatabase12InitFromDiskE*
+ fun:_ZN7history22InMemoryHistoryBackend4InitE*
+}
+{
+ bug_79654_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZNSt3setIP16RenderWidgetHostSt4lessIS1_ESaIS1_EE6insertERKS1_
+ fun:_ZN*9TabLoader12TabIsLoadingEP24NavigationControllerImpl
+ fun:_ZN*18SessionRestoreImpl21ProcessSessionWindowsEPSt6vectorIP13SessionWindowSaIS3_EE
+ fun:_ZN*18SessionRestoreImpl12OnGotSessionEiPSt6vectorIP13SessionWindowSaIS3_EE
+}
+{
+ bug_79654_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:*RenderWidgetHost*
+ ...
+ fun:_ZNSt3setIP16RenderWidgetHostSt4lessIS1_ESaIS1_EE6insertERKS1_
+ fun:*TabLoader7ObserveEiRKN7content18NotificationSourceERKNS1_19NotificationDetailsE
+ fun:_ZN23NotificationServiceImpl*
+ fun:_ZN15WebContentsImpl12SetIsLoading*
+ fun:_ZN15WebContentsImpl14RenderViewGone*
+}
+{
+ bug_84265
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN12LoginHandler6CreateEPN3net17AuthChallengeInfoEPNS0_10URLRequestE
+ fun:_Z17CreateLoginPromptPN3net17AuthChallengeInfoEPNS_10URLRequestE
+ fun:_ZN22ResourceDispatcherHost14OnAuthRequiredEPN3net10URLRequestEPNS0_17AuthChallengeInfoE
+ fun:_ZN3net13URLRequestJob21NotifyHeadersCompleteEv
+}
+{
+ bug_84770_a
+ Memcheck:Unaddressable
+ fun:_ZN5blink21FrameLoaderClientImpl12allowPluginsEb
+ fun:_ZN5blink14SubframeLoader12allowPluginsENS_28ReasonForCallingAllowPluginsE
+}
+{
+ bug_84770_b
+ Memcheck:Unaddressable
+ fun:_ZN5blink21FrameLoaderClientImpl15allowJavaScriptEb
+ fun:_ZN5blink16ScriptController17canExecuteScriptsENS_33ReasonForCallingCanExecuteScriptsE
+}
+{
+ bug_84770_c
+ Memcheck:Unaddressable
+ fun:_ZN5blink21FrameLoaderClientImpl20allowScriptExtensionERKN3WTF6StringEi
+ fun:_ZN5blink16V8DOMWindowShell16createNewContextEN2v86HandleINS1_6ObjectEEEi
+}
+{
+ bug_86481
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocator*FilePath*allocate*
+ fun:_ZNSt11_Deque_base*FilePath*_M_allocate_map*
+ fun:_ZNSt11_Deque_base*FilePath*_M_initialize_map*
+ fun:_ZNSt11_Deque_baseI*FilePath*
+ fun:_ZNSt5dequeI*FilePath*
+ fun:_ZNSt5stackI*FilePath*deque*
+ fun:_ZN9file_util14FileEnumeratorC1E*
+ fun:_ZN7history20ExpireHistoryBackend25DoExpireHistoryIndexFilesEv
+}
+{
+ bug_87423
+ Memcheck:Uninitialized
+ fun:_ZNK3net15HttpBasicStream23LogNumRttVsBytesMetricsEv
+ fun:_ZN3net22HttpNetworkTransaction18DoReadBodyCompleteEi
+ fun:_ZN3net22HttpNetworkTransaction6DoLoopEi
+ fun:_ZN3net22HttpNetworkTransaction4ReadEPNS_8IOBufferEiP14CallbackRunnerI6Tuple1IiEE
+ fun:_Z15ReadTransactionPN3net15HttpTransactionEPSs
+ fun:_ZN3net73HttpNetworkTransactionTest_ErrorResponseTofHttpsConnectViaHttpsProxy_Test8TestBodyEv
+}
+{
+ bug_90215_c
+ Memcheck:Leak
+ ...
+ fun:_ZN3net13URLRequestJob21NotifyRestartRequiredEv
+ fun:_ZN8appcache21AppCacheURLRequestJob13BeginDeliveryEv
+}
+{
+ bug_90215_d
+ Memcheck:Leak
+ ...
+ fun:_ZN8appcache19AppCacheStorageImpl23RunOnePendingSimpleTaskEv
+}
+{
+ bug_90215_e
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8appcache15AppCacheService10InitializeE*
+ fun:_ZN21ChromeAppCacheService20InitializeOnIOThreadE*
+}
+{
+ bug_90215_f
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN26TransportSecurityPersisterC1EPN3net22TransportSecurityStateERKN4base8FilePathEb
+ fun:_ZNK13ProfileIOData4InitEPSt3mapISs10linked_ptrIN3net20URLRequestJobFactory15ProtocolHandlerEESt4lessISsESaISt4pairIKSsS5_EEE
+ fun:_ZN12_GLOBAL__N_114FactoryForMain6CreateEv
+ fun:_ZN29ChromeURLRequestContextGetter20GetURLRequestContextEv
+ fun:_ZN7content21ChromeAppCacheService20InitializeOnIOThreadERKN4base8FilePathEPNS_15ResourceContextEPN3net23URLRequestContextGetterE13scoped_refptrIN5quota20SpecialStoragePolicyEE
+}
+{
+ bug_90240
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN2pp5proxy26PPP_Instance_Private_Proxy22OnMsgGetInstanceObjectEiNS0_24SerializedVarReturnValueE
+}
+{
+ bug_90487a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt10_List_nodeIPN5quota11QuotaClientEEE8allocateEjPKv
+ fun:_ZNSt10_List_baseIPN5quota11QuotaClientESaIS2_EE11_M_get_nodeEv
+ fun:_ZNSt4listIPN5quota11QuotaClientESaIS2_EE14_M_create_nodeERKS2_
+ fun:_ZNSt4listIPN5quota11QuotaClientESaIS2_EE9_M_insertESt14_List_iteratorIS2_ERKS2_
+ fun:_ZNSt4listIPN5quota11QuotaClientESaIS2_EE9push_backERKS2_
+ fun:_ZN5quota12QuotaManager14RegisterClientEPNS_11QuotaClientE
+ fun:_ZN5quota17QuotaManagerProxy14RegisterClientEPNS_11QuotaClientE
+}
+{
+ bug_93250a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:*SHA1Fingerprint*
+ fun:_ZN3net16CertVerifyResultaSERKS0_
+ fun:_ZN3net12CertVerifier12HandleResultEPNS_15X509CertificateERKSsiiRKNS_16CertVerifyResultE
+ fun:_ZN3net18CertVerifierWorker7DoReplyEv
+}
+{
+ bug_93250b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base4BindIMNS_6subtle18TaskClosureAdapterEFvvEPS2_EENS_8internal20InvokerStorageHolderINS6_15InvokerStorage1IT_T0_EEEES9_RKSA_
+ fun:_ZN4base11MessageLoop8PostTaskERKN15tracked_objects8LocationEP4Task
+ fun:_ZN3net18CertVerifierWorker6FinishEv
+ fun:_ZN3net18CertVerifierWorker3RunEv
+}
+{
+ bug_93730_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN14ServiceProcess10InitializeEP16MessageLoopForUIRK11CommandLineP19ServiceProcessState
+ fun:_Z18ServiceProcessMainRK18MainFunctionParams
+ ...
+ fun:ChromeMain
+ fun:main
+}
+{
+ bug_93730_b
+ Memcheck:Leak
+ fun:_Zna*
+ fun:_ZN4base13LaunchProcessERKSt6vectorISsSaISsEERKNS_13LaunchOptionsEPi
+ fun:_ZN4base13LaunchProcessERK11CommandLineRKNS_13LaunchOptionsEPi
+ fun:_ZN21ServiceProcessControl8Launcher5DoRunEv
+}
+{
+ bug_93730_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_Z17NewRunnableMethodIN21ServiceProcessControl8LauncherEMS1_FvvEEP14CancelableTaskPT_T0_
+ fun:_ZN21ServiceProcessControl8Launcher5DoRunEv
+}
+{
+ bug_93730_d
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC11SyncChannelC1ERKNS_13ChannelHandleENS_7Channel4ModeEPNS4_8ListenerEPN4base16MessageLoopProxyEbPNS8_13WaitableEventE
+ fun:_ZN16ServiceIPCServer13CreateChannelEv
+ fun:_ZN16ServiceIPCServer4InitEv
+ fun:_ZN14ServiceProcess10InitializeEP16MessageLoopForUIRK11CommandLineP19ServiceProcessState
+ fun:_Z18ServiceProcessMainRK18MainFunctionParams
+ ...
+ fun:ChromeMain
+ fun:main
+}
+{
+ bug_94764
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8remoting13ClientSession11UnpressKeysEv
+ fun:_ZN8remoting34ClientSessionTest_UnpressKeys_Test8TestBodyEv
+}
+{
+ bug_95448
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeISt4pairIKSsPN4base5ValueEEEE8allocateEjPKv
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN4base5ValueEESt10_Select1stIS5_ESt4lessISsESaIS5_EE11_M_get_nodeEv
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN4base5ValueEESt10_Select1stIS5_ESt4lessISsESaIS5_EE14_M_create_nodeERKS5_
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN4base5ValueEESt10_Select1stIS5_ESt4lessISsESaIS5_EE10_M_insert_EPKSt18_Rb_tree_node_baseSE_RKS5_
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN4base5ValueEESt10_Select1stIS5_ESt4lessISsESaIS5_EE17_M_insert_unique_ESt23_Rb_tree_const_iteratorIS5_ERKS5_
+ fun:_ZNSt3mapISsPN4base5ValueESt4lessISsESaISt4pairIKSsS2_EEE6insertESt17_Rb_tree_iteratorIS7_ERKS7_
+ fun:_ZNSt3mapISsPN4base5ValueESt4lessISsESaISt4pairIKSsS2_EEEixERS6_
+ fun:_ZN4base15DictionaryValue23SetWithoutPathExpansionERKSsPNS_5ValueE
+ fun:_ZN4base15DictionaryValue3SetERKSsPNS_5ValueE
+ fun:_ZN4base15DictionaryValue9SetStringERKSsRKSbItNS_20string16_char_traitsESaItEE
+ fun:_ZN11PluginPrefs23CreatePluginFileSummaryERKN6webkit13WebPluginInfoE
+ fun:_ZN11PluginPrefs19OnUpdatePreferencesESt6vectorIN6webkit13WebPluginInfoESaIS2_EES0_INS1_5npapi11PluginGroupESaIS6_EE
+}
+{
+ bug_98867
+ Memcheck:Jump
+ obj:*
+ obj:*
+ obj:*
+}
+{
+ bug_99307
+ Memcheck:Uninitialized
+ fun:modp_b64_encode
+ fun:_ZN4base12Base64Encode*
+ fun:_ZN11web_ui_util15GetImageDataUrlERK8SkBitmap
+ fun:_ZN12_GLOBAL__N_121NetworkInfoDictionary8set_iconERK8SkBitmap
+}
+{
+ bug_100982
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink12RenderRegion22setRenderBoxRegionInfoEPKNS_9RenderBoxEiib
+ fun:_ZNK7blink9RenderBox19renderBoxRegionInfoEPNS_12RenderRegionEiNS0_24RenderBoxRegionInfoFlagsE
+ ...
+ fun:_ZN5blink11RenderBlock5paintERNS_9PaintInfoERKNS_8IntPointE
+}
+{
+ bug_101750
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEj
+ fun:_ZN3WTF7HashSetIPN7blink16SVGStyledElementENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEEnwEj
+ fun:_ZN5blink21SVGDocumentExtensions18addPendingResourceERKN3WTF12AtomicStringEPNS_16SVGStyledElementE
+}
+{
+ bug_101781_d
+ Memcheck:Uninitialized
+ fun:_ZN7testing8internal11CmpHelperGEIddEENS_15AssertionResultEPKcS4_RKT_RKT0_
+ fun:_ZN3gfx31JPEGCodec_EncodeDecodeRGBA_Test8TestBodyEv
+}
+{
+ bug_102327a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN15tracked_objects10ThreadData10InitializeEv
+ fun:_ZN15tracked_objects10ThreadData30InitializeAndSetTrackingStatusEb
+ fun:_ZN15tracked_objects10ThreadData29ShutdownSingleThreadedCleanupEb
+}
+{
+ bug_102327d
+ Memcheck:Uninitialized
+ fun:_ZN15tracked_objects9DeathData11RecordDeathEiii
+ fun:_ZN15tracked_objects10ThreadData11TallyADeathERKNS_6BirthsEii
+ fun:_ZN15tracked_objects10ThreadData31TallyRunOnNamedThreadIfTrackingERKN4base12TrackingInfoERKNS_11TrackedTimeES7_
+}
+{
+   Intentional leak of STL map during thread cleanup in profiler
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK15tracked_objects10ThreadData26OnThreadTerminationCleanupEv
+}
+{
+ bug_102831_a
+ Memcheck:Leak
+ ...
+ fun:_ZN17PluginLoaderPosix19LoadPluginsInternalEv
+}
+{
+ bug_104447
+ Memcheck:Leak
+ ...
+ fun:HB_OpenTypeShape
+ fun:arabicSyriacOpenTypeShape
+ fun:HB_ArabicShape
+ fun:HB_ShapeItem
+ fun:_ZN5blink21ComplexTextController11shapeGlyphsEv
+ fun:_ZN5blink21ComplexTextController13nextScriptRunEv
+ fun:_ZN5blink21ComplexTextController14widthOfFullRunEv
+ fun:_ZNK7blink4Font24floatWidthForComplexTextERKNS_7TextRunEPN3WTF7HashSetIPKNS_14SimpleFontDataENS4_7PtrHashIS8_EENS4_10HashTraitsIS8_EEEEPNS_13GlyphOverflowE
+ fun:_ZNK7blink4Font5widthERKNS_7TextRunERiRN3WTF6StringE
+ fun:_ZN5blink14SVGTextMetricsC1EPNS_19RenderSVGInlineTextERKNS_7TextRunE
+ fun:_ZN5blink14SVGTextMetrics21measureCharacterRangeEPNS_19RenderSVGInlineTextEjj
+ fun:_ZNK7blink30SVGTextLayoutAttributesBuilder25propagateLayoutAttributesEPNS_12RenderObjectERN3WTF6VectorINS_23SVGTextLayoutAttributesELm0EEERjRt
+}
+{
+ bug_104806_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN15tracked_objects10ThreadData19TallyABirthIfActiveERKNS_8LocationE
+ fun:_ZN4base12TrackingInfoC?ERKN15tracked_objects8LocationENS_9TimeTicksE
+}
+{
+ bug_104806_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZNSt3mapIPKN15tracked_objects6BirthsENS0_9DeathDataESt4lessIS3_ESaISt4pairIKS3_S4_EEEixERS8_
+ fun:_ZN15tracked_objects10ThreadData11TallyADeathERKNS_6BirthsEii
+ fun:_ZN15tracked_objects10ThreadData32TallyRunOnWorkerThreadIfTrackingEPKNS_6BirthsERKNS_11TrackedTimeES6_S6_
+}
+{
+ bug_105744b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZNSt6vector*9push_back*
+ fun:_ZN4skia19ConvolutionFilter1D9AddFilterEiPKsi
+ fun:_ZN4skia12_GLOBAL__N_112ResizeFilter14ComputeFiltersEiiiffPNS_19ConvolutionFilter1DE
+ fun:_ZN4skia12_GLOBAL__N_112ResizeFilterC1ENS_15ImageOperations12ResizeMethodEiiiiRK7SkIRect
+ fun:_ZN4skia15ImageOperations11ResizeBasicERK8SkBitmapNS0_12ResizeMethodEiiRK7SkIRect
+ fun:_ZN4skia15ImageOperations6ResizeERK8SkBitmapNS0_12ResizeMethodEiiRK7SkIRect
+ fun:_ZN4skia15ImageOperations6ResizeERK8SkBitmapNS0_12ResizeMethodEii
+ fun:_ZN24ChromeRenderViewObserver21CaptureFrameThumbnailEPN5blink7WebViewEiiP8SkBitmapP14ThumbnailScore
+ fun:_ZN24ChromeRenderViewObserver16CaptureThumbnailEv
+ fun:_ZN24ChromeRenderViewObserver15CapturePageInfoEib
+}
+{
+ bug_105907
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN4skia14BGRAConvolve2DEPKhibRKNS_19ConvolutionFilter1DES4_iPhb
+ fun:_ZN4skia15ImageOperations11ResizeBasicE*
+ fun:_ZN4skia15ImageOperations6ResizeE*
+}
+{
+ bug_106912
+ Memcheck:Leak
+ ...
+ fun:_ZN15tracked_objects10ThreadData23InitializeThreadContextERKSs
+ fun:_ZN4base14PlatformThread7SetNameEPKc
+}
+{
+ bug_111669
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8appcache17AppCacheDiskCache10ActiveCall17OnAsyncCompletionEi
+}
+{
+ bug_112278
+ Memcheck:Uninitialized
+ fun:fetch_texel_2d_f_rgba8888
+ ...
+ fun:sample_nearest_2d
+ fun:fetch_texel_lod
+ fun:fetch_texel
+ fun:_mesa_execute_program
+ fun:run_program*
+ fun:_swrast_exec_fragment_program
+ fun:shade_texture_span
+ fun:_swrast_write_rgba_span
+ fun:general_triangle
+ ...
+ fun:_swrast_Triangle
+ fun:triangle_rgba
+ ...
+ fun:run_render
+ fun:_tnl_run_pipeline
+ fun:_tnl_draw_prims
+ fun:_tnl_vbo_draw_prims
+}
+{
+ bug_122457
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF10RefCountedIN5blink12MHTMLArchiveEEnwEm
+ fun:_ZN5blink12MHTMLArchive6createEv
+ ...
+ fun:_ZN5blink11MHTMLParser22parseArchiveWithHeaderEPNS_10MIMEHeaderE
+ fun:_ZN5blink11MHTMLParser12parseArchiveEv
+ fun:_ZN5blink12MHTMLArchive6createERKNS_4KURLEPNS_12SharedBufferE
+}
+{
+ bug_122717_use_after_free
+ Memcheck:Unaddressable
+ fun:__pthread_mutex_unlock_usercnt
+ fun:_ZN4base8internal8LockImpl6UnlockEv
+ fun:_ZN4base4Lock7ReleaseEv
+ fun:_ZN4base8AutoLockD1Ev
+ fun:_ZN5gdata15GDataFileSystem21RunTaskOnIOThreadPoolERKN4base8CallbackIFvvEEE
+}
+{
+ bug_122717_leak
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10TaskRunner16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_
+ fun:_ZN7content13BrowserThread16PostTaskAndReplyENS0_2IDERKN15tracked_objects8LocationERKN4base8CallbackIFvvEEESB_
+}
+{
+ bug_123307
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF16fastZeroedMallocEm
+ ...
+ fun:_ZN5blink12_GLOBAL__N_111V8ObjectMapIN2v86ObjectEjE3setERKNS2_6HandleIS3_EERKj
+ fun:_ZN5blink12_GLOBAL__N_110Serializer10greyObjectERKN2v86HandleINS2_6ObjectEEE
+ fun:_ZN5blink12_GLOBAL__N_110Serializer11doSerializeEN2v86HandleINS2_5ValueEEEPNS1_9StateBaseE
+ fun:_ZN5blink12_GLOBAL__N_110Serializer9serializeEN2v86HandleINS2_5ValueEEE
+ fun:_ZN5blink21SerializedScriptValueC1EN2v86HandleINS1_5ValueEEEPN3WTF6VectorINS5_6RefPtrINS_11MessagePortEEELm1EEEPNS6_INS7_INS5_11ArrayBufferEEELm1EEERb
+ fun:_ZN5blink21SerializedScriptValue6createEN2v86HandleINS1_5ValueEEEPN3WTF6VectorINS5_6RefPtrINS_11MessagePortEEELm1EEEPNS6_INS7_INS5_11ArrayBufferEEELm1EEERb
+ fun:_ZN5blinkL25handlePostMessageCallbackERKN2v89ArgumentsEb
+ fun:_ZN5blink11V8DOMWindow19postMessageCallbackERKN2v89ArgumentsE
+ fun:_ZN2v88internalL19HandleApiCallHelperILb0EEEPNS0_11MaybeObjectENS0_12_GLOBAL__N_116BuiltinArgumentsILNS0_21BuiltinExtraArgumentsE1EEEPNS0_7IsolateE
+ fun:_ZN2v88internalL21Builtin_HandleApiCallENS0_12_GLOBAL__N_116BuiltinArgumentsILNS0_21BuiltinExtraArgumentsE1EEEPNS0_7IsolateE
+}
+{
+ bug_124488
+ Memcheck:Leak
+ fun:malloc
+ fun:strdup
+ ...
+ fun:_ZN34CopyTextureCHROMIUMResourceManager10InitializeEv
+ fun:_ZN3gpu5gles216GLES2DecoderImpl10InitializeERK13scoped_refptrIN3gfx9GLSurfaceEERKS2_INS3_9GLContextEERKNS3_4SizeERKNS0_18DisallowedFeaturesEPKcRKSt6vectorIiSaIiEE
+ fun:_ZN6webkit3gpu18GLInProcessContext10InitializeERKN3gfx4SizeEPS1_PKcPKiNS2_13GpuPreferenceE
+ fun:_ZN6webkit3gpu18GLInProcessContext22CreateOffscreenContextEPS1_RKN3gfx4SizeES2_PKcPKiNS3_13GpuPreferenceE
+ fun:_ZN6webkit3gpu46WebGraphicsContext3DInProcessCommandBufferImpl10InitializeEN5blink20WebGraphicsContext3D10AttributesEPS3_
+}
+{
+ bug_124496
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN8notifier26ProxyResolvingClientSocket23ProcessProxyResolveDoneEi
+}
+{
+ bug_127716
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gfx5ImageC1ERK8SkBitmap
+ fun:_ZN16BrowserThemePack16LoadRawBitmapsToE*
+ fun:_ZN16BrowserThemePack18BuildFromExtensionEPK9Extension
+ fun:_ZN45BrowserThemePackTest_CanBuildAndReadPack_Test8TestBodyEv
+}
+{
+ bug_130362
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN12invalidation20NewPermanentCallbackINS_22InvalidationClientImplES1_St4pairINS_6StatusESsEEEPN4base8CallbackIFvT1_EEEPT_MT0_FvS7_E
+ fun:_ZN12invalidation22InvalidationClientImpl34ScheduleStartAfterReadingStateBlobEv
+ fun:_ZN12invalidation22InvalidationClientImpl5StartEv
+ fun:_ZN6syncer24SyncInvalidationListener5StartERKSsS2_S2_RKSt3mapIN8syncable9ModelTypeElSt4lessIS5_ESaISt4pairIKS5_lEEERKN12browser_sync10WeakHandleINS_24InvalidationStateTrackerEEEPNS0_8ListenerEPNS_11StateWriterE
+ fun:_ZN6syncer20InvalidationNotifier17UpdateCredentialsERKSsS2_
+ fun:_ZN6syncer31NonBlockingInvalidationNotifier4Core17UpdateCredentialsERKSsS3_
+}
+{
+ bug_130449
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN12invalidation20NewPermanentCallbackINS_22InvalidationClientImplES1_St4pairINS_6StatusESsEEEPN4base8CallbackIFvT1_EEEPT_MT0_FvS7_E
+ fun:_ZN12invalidation22InvalidationClientImpl34ScheduleStartAfterReadingStateBlobEv
+ fun:_ZN12invalidation22InvalidationClientImpl5StartEv
+ fun:_ZN6syncer24SyncInvalidationListener5StartERKSsS2_S2_RKSt3mapIN8syncable9ModelTypeElSt4lessIS5_ESaISt4pairIKS5_lEEERKN12browser_sync10WeakHandleINS_24InvalidationStateTrackerEEEPNS0_8ListenerE
+ fun:_ZN6syncer20InvalidationNotifier17UpdateCredentialsERKSsS2_
+ fun:_ZN6syncer31NonBlockingInvalidationNotifier4Core17UpdateCredentialsERKSsS3_
+}
+{
+ bug_130619
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink9ClipRects6createERKS0_
+ fun:_ZN5blink11RenderLayer15updateClipRectsEPKS0_PNS_12RenderRegionENS_13ClipRectsTypeENS_29OverlayScrollbarSizeRelevancyE
+ ...
+ fun:_ZNK7blink11RenderLayer15parentClipRectsEPKS0_PNS_12RenderRegionENS_13ClipRectsTypeERNS_9ClipRectsENS_29OverlayScrollbarSizeRelevancyE
+ fun:_ZNK7blink11RenderLayer18backgroundClipRectEPKS0_PNS_12RenderRegionENS_13ClipRectsTypeENS_29OverlayScrollbarSizeRelevancyE
+}
+{
+ bug_138058
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink12WebVTTParser22constructTreeFromTokenEPNS_8DocumentE
+ fun:_ZN5blink12WebVTTParser33createDocumentFragmentFromCueTextERKN3WTF6StringE
+ fun:_ZN5blink12TextTrackCue12getCueAsHTMLEv
+ fun:_ZN5blink12TextTrackCue17updateDisplayTreeEf
+ fun:_ZN5blink16HTMLMediaElement25updateActiveTextTrackCuesEf
+}
+{
+ bug_138060
+ Memcheck:Uninitialized
+ fun:_NPN_EvaluateHelper
+ fun:_NPN_Evaluate
+ fun:_ZN5blink11WebBindings8evaluateEP4_NPPP8NPObjectP9_NPStringP10_NPVariant
+ fun:_ZL13executeScriptPK12PluginObjectPKc
+ fun:NPP_Destroy
+ fun:_ZN6webkit5npapi14PluginInstance11NPP_DestroyEv
+ fun:_ZN6webkit5npapi21WebPluginDelegateImpl15DestroyInstanceEv
+ fun:_ZN6webkit5npapi21WebPluginDelegateImplD0Ev
+ fun:_ZN6webkit5npapi21WebPluginDelegateImpl15PluginDestroyedEv
+ fun:_ZN6webkit5npapi13WebPluginImpl22TearDownPluginInstanceEPN5blink12WebURLLoaderE
+ fun:_ZN6webkit5npapi13WebPluginImpl12SetContainerEPN5blink18WebPluginContainerE
+ fun:_ZN6webkit5npapi13WebPluginImpl7destroyEv
+ fun:_ZN5blink22WebPluginContainerImplD0Ev
+ fun:_ZN3WTF10RefCountedIN7blink6WidgetEE5derefEv
+ fun:_ZNSt4pairIN3WTF6RefPtrIN7blink6WidgetEEEPNS2_9FrameViewEED1Ev
+ fun:_ZN3WTF9HashTableINS_6RefPtrIN7blink6WidgetEEESt4pairIS4_PNS2_9FrameViewEENS_18PairFirstExtractorIS8_EENS_7PtrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSE_IS7_EEEESF_E15deallocateTableEPS8_i
+ fun:_ZN3WTF9HashTableINS_6RefPtrIN7blink6WidgetEEESt4pairIS4_PNS2_9FrameViewEENS_18PairFirstExtractorIS8_EENS_7PtrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSE_IS7_EEEESF_ED1Ev
+ fun:_ZN3WTF7HashMapINS_6RefPtrIN7blink6WidgetEEEPNS2_9FrameViewENS_7PtrHashIS4_EENS_10HashTraitsIS4_EENS9_IS6_EEED1Ev
+ fun:_ZN5blink12RenderWidget28resumeWidgetHierarchyUpdatesEv
+ fun:_ZN5blink7Element6detachEv
+ fun:_ZN5blink13ContainerNode14detachChildrenEv
+ fun:_ZN5blink13ContainerNode6detachEv
+}
+{
+ bug_138220_a
+ Memcheck:Uninitialized
+ fun:_ZNK7blink16HTMLInputElement8dataListEv
+ fun:_ZNK7blink16HTMLInputElement4listEv
+ fun:_ZN5blink21RenderSliderContainer6layoutEv
+ fun:_ZN5blink11RenderBlock16layoutBlockChildEPNS_9RenderBoxERNS0_10MarginInfoERNS_20FractionalLayoutUnitES6_
+ fun:_ZN5blink11RenderBlock19layoutBlockChildrenEbRNS_20FractionalLayoutUnitE
+ fun:_ZN5blink11RenderBlock11layoutBlockEbNS_20FractionalLayoutUnitE
+ fun:_ZN5blink11RenderBlock6layoutEv
+ fun:_ZN5blink12RenderSlider6layoutEv
+}
+{
+ bug_138220_b
+ Memcheck:Uninitialized
+ fun:_ZNK7blink16HTMLInputElement8dataListEv
+ fun:_ZNK7blink16HTMLInputElement4listEv
+ fun:_ZN5blink11RenderTheme16paintSliderTicksEPNS_12RenderObjectERKNS_9PaintInfoERKNS_7IntRectE
+ fun:_ZN5blink24RenderThemeChromiumLinux16paintSliderTrackEPNS_12RenderObjectERKNS_9PaintInfoERKNS_7IntRectE
+ fun:_ZN5blink11RenderTheme5paintEPNS_12RenderObjectERKNS_9PaintInfoERKNS_7IntRectE
+ fun:_ZN5blink9RenderBox19paintBoxDecorationsERNS_9PaintInfoERKNS_21FractionalLayoutPointE
+ fun:_ZN5blink11RenderBlock11paintObjectERNS_9PaintInfoERKNS_21FractionalLayoutPointE
+}
+{
+ bug_138233_a
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF10RefCountedIN7blink17ScriptProfileNodeEEnwEm
+ fun:_ZN5blink17ScriptProfileNode6createEPKN2v814CpuProfileNodeE
+ fun:_ZNK7blink13ScriptProfile4headEv
+ fun:_ZN5blink23ScriptProfileV8InternalL14headAttrGetterEN2v85LocalINS1_6StringEEERKNS1_12AccessorInfoE
+ fun:_ZN2v88internal8JSObject23GetPropertyWithCallbackEPNS0_6ObjectES3_PNS0_6StringE
+ fun:_ZN2v88internal6Object11GetPropertyEPS1_PNS0_12LookupResultEPNS0_6StringEP18PropertyAttributes
+ fun:_ZN2v88internal6LoadIC4LoadENS0_16InlineCacheStateENS0_6HandleINS0_6ObjectEEENS3_INS0_6StringEEE
+ fun:_ZN2v88internal11LoadIC_MissENS0_9ArgumentsEPNS0_7IsolateE
+}
+{
+ bug_138233_b
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF10RefCountedIN7blink17ScriptProfileNodeEEnwEm
+ fun:_ZN5blink17ScriptProfileNode6createEPKN2v814CpuProfileNodeE
+ fun:_ZNK7blink17ScriptProfileNode8childrenEv
+ fun:_ZN5blink27ScriptProfileNodeV8InternalL16childrenCallbackERKN2v89ArgumentsE
+}
+{
+ bug_138712
+ Memcheck:Uninitialized
+ fun:_ZN7testing8internal11CmpHelperGEIddEENS_15AssertionResultEPKcS4_RKT_RKT0_
+ fun:_ZN3gfx30JPEGCodec_EncodeDecodeRGB_Test8TestBodyEv
+}
+{
+ bug_144118_b
+ Memcheck:Unaddressable
+ fun:_ZNK3WTF6OwnPtrIN5blink14ScrollbarGroupEEcvMS3_PS2_Ev
+ fun:_ZN5blink22WebPluginContainerImpl14reportGeometryEv
+ fun:_ZN5blink22WebPluginContainerImpl12setFrameRectERKN7blink7IntRectE
+ ...
+ fun:_ZN9TestShell4dumpEv
+}
+{
+ bug_144913_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8chromeos17DBusThreadManager10InitializeEv
+ fun:_ZN8chromeos23KioskModeIdleLogoutTest5SetUpEv
+}
+{
+ bug_144913_c
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN8chromeos21DBusThreadManagerImplC1ENS_28DBusClientImplementationTypeE
+ fun:_ZN8chromeos17DBusThreadManager10InitializeEv
+ fun:_ZN8chromeos23KioskModeIdleLogoutTest5SetUpEv
+}
+{
+ bug_144930_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZL21cachedDeviceLuminancef
+}
+{
+ bug_145650a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN14WebDataService10AddKeywordERK15TemplateURLData
+ fun:_ZN18TemplateURLService11AddNoNotifyEP11TemplateURLb
+}
+{
+ bug_145650b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN14WebDataService13RemoveKeywordEl
+ fun:_ZN18TemplateURLService14RemoveNoNotifyEP11TemplateURL
+ fun:_ZN18TemplateURLService6RemoveEP11TemplateURL
+ fun:_ZN9protector71DefaultSearchProviderChangeTest_CurrentSearchProviderRemovedByUser_Test19RunTestOnMainThreadEv
+}
+{
+ bug_145650c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN14WebDataService13UpdateKeywordERK15TemplateURLData
+ fun:_ZN18TemplateURLService32SetDefaultSearchProviderNoNotifyEP11TemplateURL
+}
+{
+ bug_125692a
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal11StoreBuffer28IteratePointersInStoreBufferEPFvPPNS0_10HeapObjectES3_E
+ fun:_ZN2v88internal11StoreBuffer25IteratePointersToNewSpaceEPFvPPNS0_10HeapObjectES3_E
+ fun:_ZN2v88internal20MarkCompactCollector29EvacuateNewSpaceAndCandidatesEv
+ fun:_ZN2v88internal20MarkCompactCollector11SweepSpacesEv
+ fun:_ZN2v88internal20MarkCompactCollector14CollectGarbageEv
+ fun:_ZN2v88internal4Heap11MarkCompactEPNS0_8GCTracerE
+}
+{
+ bug_125692b
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal11StoreBuffer7CompactEv
+ fun:_ZN2v88internal11StoreBuffer19PrepareForIterationEv
+ fun:_ZN2v88internal11StoreBuffer25IteratePointersToNewSpaceEPFvPPNS0_10HeapObjectES3_E
+ fun:_ZN2v88internal20MarkCompactCollector29EvacuateNewSpaceAndCandidatesEv
+ fun:_ZN2v88internal20MarkCompactCollector11SweepSpacesEv
+ fun:_ZN2v88internal20MarkCompactCollector14CollectGarbageEv
+ fun:_ZN2v88internal4Heap11MarkCompactEPNS0_8GCTracerE
+ fun:_ZN2v88internal4Heap24PerformGarbageCollectionENS0_16GarbageCollectorEPNS0_8GCTracerE
+ fun:_ZN2v88internal4Heap14CollectGarbageENS0_15AllocationSpaceENS0_16GarbageCollectorEPKcS5_
+ fun:_ZN2v88internal4Heap14CollectGarbageENS0_15AllocationSpaceEPKc
+ fun:_ZN2v88internal4Heap17CollectAllGarbageEiPKc
+ fun:_ZN2v88internal4Heap16IdleNotificationEi
+ fun:_ZN2v88internal2V816IdleNotificationEi
+ fun:_ZN2v82V816IdleNotificationEi
+ fun:_ZN16RenderThreadImpl11IdleHandlerEv
+}
+{
+ bug_145693
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN10extensions18PermissionsUpdater17RecordOAuth2GrantEPKNS_9ExtensionE
+ fun:_ZN10extensions18PermissionsUpdater22GrantActivePermissionsEPKNS_9ExtensionEb
+ fun:_ZN10extensions12CrxInstaller25ReportSuccessFromUIThreadEv
+}
+{
+ bug_145695
+ Memcheck:Leak
+ fun:malloc
+ fun:NaClDescImcBoundDescAcceptConn
+ fun:RevRpcHandlerBase
+ fun:NaClThreadInterfaceStart
+}
+{
+ bug_145696
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN10extensions9TabHelper23OnInlineWebstoreInstallEiiRKSsRK4GURL
+}
+{
+ bug_145697
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN18SecurityFilterPeer40CreateSecurityFilterPeerForDeniedRequestEN12ResourceType4TypeEPN11webkit_glue20ResourceLoaderBridge4PeerEi
+ fun:_ZN12_GLOBAL__N_124RendererResourceDelegate17OnRequestCompleteEPN11webkit_glue20ResourceLoaderBridge4PeerEN12ResourceType4TypeERKN3net16URLRequestStatusE
+ fun:_ZN7content18ResourceDispatcher17OnRequestCompleteEiRKN3net16URLRequestStatusERKSsRKN4base9TimeTicksE
+}
+{
+ bug_145699
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN17OAuth2ApiCallFlow24CreateAccessTokenFetcherEv
+ fun:_ZN17OAuth2ApiCallFlow20BeginMintAccessTokenEv
+ fun:_ZN17OAuth2ApiCallFlow12BeginApiCallEv
+ fun:_ZN17OAuth2ApiCallFlow5StartEv
+ fun:_ZN19OAuth2MintTokenFlow13FireAndForgetEv
+}
+{
+ bug_145703
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7content16SiteInstanceImpl10GetProcessEv
+ fun:_ZN7content*Render*SiteInstance*
+ fun:_ZN7content*Render*SiteInstance*
+ ...
+ fun:_ZN7content15WebContentsImpl4InitERKNS_11WebContents12CreateParamsE
+}
+{
+ bug_145708
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN27ExtensionDevToolsClientHostC1EPN7content11WebContentsERKSsS4_i
+ fun:_ZN22AttachDebuggerFunction7RunImplEv
+ fun:_ZN17ExtensionFunction3RunEv
+ fun:_ZN27ExtensionFunctionDispatcher8DispatchERK31ExtensionHostMsg_Request_ParamsPN7content14RenderViewHostE
+ fun:_ZN10extensions13ExtensionHost9OnRequestERK31ExtensionHostMsg_Request_Params
+}
+{
+ bug_145712
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink25NotificationPresenterImpl17requestPermissionEPN7blink22ScriptExecutionContextEN3WTF10PassRefPtrINS1_12VoidCallbackEEE
+ fun:_ZN5blink18NotificationCenter17requestPermissionEN3WTF10PassRefPtrINS_12VoidCallbackEEE
+ fun:_ZN5blink20V8NotificationCenter25requestPermissionCallbackERKN2v89ArgumentsE
+}
+{
+ bug_145723
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_Z20NewExtensionFunctionI25TabsExecuteScriptFunctionEP17ExtensionFunctionv
+ fun:_ZN25ExtensionFunctionRegistry11NewFunctionERKSs
+ fun:_ZN27ExtensionFunctionDispatcher23CreateExtensionFunctionERK31ExtensionHostMsg_Request_ParamsPKN10extensions9ExtensionEiRKNS3_10ProcessMapEPNS3_12ExtensionAPIEPvPN3IPC6SenderEPN7content14RenderViewHostEi
+ fun:_ZN27ExtensionFunctionDispatcher8DispatchERK31ExtensionHostMsg_Request_ParamsPN7content14RenderViewHostE
+ fun:_ZN10extensions13ExtensionHost9OnRequestERK31ExtensionHostMsg_Request_Params
+}
+{
+ bug_145735
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorIcE8allocateEmPKv
+ fun:_ZNSt12_Vector_baseIcSaIcEE11_M_allocateEm
+ fun:_ZNSt12_Vector_baseIcSaIcEEC2EmRKS0_
+ fun:_ZNSt6vectorIcSaIcEEC1EmRKcRKS0_
+ fun:_ZN4base5files12_GLOBAL__N_121InotifyReaderCallbackEPNS1_13InotifyReaderEii
+}
+{
+ bug_146464
+ Memcheck:Leak
+ fun:realloc
+ fun:add_codeset.isra.10
+ ...
+ fun:XCreatePixmap
+ fun:XCreateBitmapFromData
+ ...
+ fun:_ZN4aura19RootWindowHostLinuxC1EPNS_22RootWindowHostDelegateERKN3gfx4RectE
+}
+{
+ bug_146950
+ Memcheck:Leak
+ fun:malloc
+ fun:get_peer_sock_name
+ fun:_xcb_get_auth_info
+ fun:xcb_connect_to_display_with_auth_info
+ fun:_XConnectXCB
+ fun:XOpenDisplay
+ fun:_ZN4base18MessagePumpAuraX1118GetDefaultXDisplayEv
+}
+{
+ bug_162825
+ Memcheck:Uninitialized
+ fun:bcmp
+ fun:_ZNK3gpu5gles221ShaderTranslatorCache26ShaderTranslatorInitParamsltERKS2_
+ fun:_ZNKSt4lessIN3gpu5gles221ShaderTranslatorCache26ShaderTranslatorInitParams*
+ ...
+ fun:*ShaderTranslatorInitParams*
+ ...
+ fun:_ZN3gpu5gles216GLES2DecoderImpl26InitializeShaderTranslatorEv
+}
+{
+ bug_163922
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN10extensions16SettingsFrontendC1ERK13scoped_refptrINS_22SettingsStorageFactoryEEP7Profile
+ fun:_ZN10extensions16SettingsFrontend6CreateEP7Profile
+ fun:_ZN16ExtensionServiceC1E*
+ fun:_ZN10extensions19ExtensionSystemImpl6Shared4InitEb
+ fun:_ZN10extensions19ExtensionSystemImpl21InitForRegularProfileEb
+ fun:_ZN14ProfileManager22DoFinalInitForServicesEP7Profileb
+ fun:_ZN14ProfileManager11DoFinalInitEP7Profileb
+ fun:_ZN14ProfileManager10AddProfileEP7Profile
+ fun:_ZN14ProfileManager10GetProfileE*
+}
+{
+ bug_163924
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN28JSONAsynchronousUnpackerImpl22StartProcessOnIOThreadEN7content13BrowserThread2IDERKSs
+}
+{
+ bug_164176
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN18BrowserProcessImpl21PreMainMessageLoopRunEv
+ fun:_ZN22ChromeBrowserMainParts25PreMainMessageLoopRunImplEv
+ fun:_ZN22ChromeBrowserMainParts21PreMainMessageLoopRunEv
+ fun:_ZN7content15BrowserMainLoop13CreateThreadsEv
+ fun:_ZN7content21BrowserMainRunnerImpl10InitializeERKNS_18MainFunctionParamsE
+ fun:_ZN7content11BrowserMainERKNS_18MainFunctionParamsE
+ fun:_ZN7content23RunNamedProcessTypeMainERKSsRKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl3RunEv
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+ fun:ChromeMain
+}
+{
+ bug_164178
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3net25MultiThreadedCertVerifier6VerifyEPNS_15X509CertificateERKSsiPNS_6CRLSetEPNS_16CertVerifyResultERKN4base8CallbackIFviEEEPPvRKNS_11BoundNetLogE
+ fun:_ZN3net25SingleRequestCertVerifier6VerifyEPNS_15X509CertificateERKSsiPNS_6CRLSetEPNS_16CertVerifyResultERKN4base8CallbackIFviEEERKNS_11BoundNetLogE
+ fun:_ZN3net18SSLClientSocketNSS12DoVerifyCertEi
+ fun:_ZN3net18SSLClientSocketNSS15DoHandshakeLoopEi
+ fun:_ZN3net18SSLClientSocketNSS21OnHandshakeIOCompleteEi
+}
+{
+ bug_164179
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3net10URLFetcher6CreateERK4GURLNS0_11RequestTypeEPNS_18URLFetcherDelegateE
+ fun:_ZN18WebResourceService10StartFetchEv
+}
+{
+ bug_166819
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK3sql10Connection21GetUntrackedStatementEPKc
+ fun:_ZNK3sql10Connection21DoesTableOrIndexExistEPKcS2_
+ fun:_ZNK3sql10Connection14DoesTableExistEPKc
+ fun:_ZN3sql9MetaTable14DoesTableExistEPNS_10ConnectionE
+ ...
+ fun:_ZN7history16TopSitesDatabase4InitE*
+ fun:_ZN7history15TopSitesBackend16InitDBOnDBThreadE*
+}
+{
+ bug_166819b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK3sql10Connection21GetUntrackedStatementEPKc
+ fun:_ZNK3sql10Connection21DoesTableOrIndexExistEPKcS2_
+ fun:_ZNK3sql10Connection14DoesTableExistEPKc
+ fun:_ZN7history17ShortcutsDatabase11EnsureTableEv
+ fun:_ZN7history17ShortcutsDatabase4InitEv
+ fun:_ZN7history16ShortcutsBackend12InitInternalEv
+}
+{
+ bug_167175a
+ Memcheck:Leak
+ ...
+ fun:g_*
+ ...
+ fun:_ZN16BrowserWindowGtk11InitWidgetsEv
+ fun:_ZN16BrowserWindowGtk4InitEv
+ fun:_ZN13BrowserWindow19CreateBrowserWindowEP7Browser
+}
+{
+ bug_167175b
+ Memcheck:Leak
+ fun:malloc
+ obj:/lib/libpng12.so.0.42.0
+ fun:png_create_read_struct_2
+ ...
+ fun:_ZN15ReloadButtonGtkC1EP18LocationBarViewGtkP7Browser
+ fun:_ZN17BrowserToolbarGtk4InitEP10_GtkWindow
+ fun:_ZN16BrowserWindowGtk11InitWidgetsEv
+ fun:_ZN16BrowserWindowGtk4InitEv
+ fun:_ZN13BrowserWindow19CreateBrowserWindowEP7Browser
+}
+{
+ bug_167175d
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISbItN4base20string16_char_traitsESaItEEE8allocateEmPKv
+ fun:_ZNSt12_Vector_baseISbItN4base20string16_char_traitsESaItEESaIS3_EE11_M_allocateEm
+ ...
+ fun:_ZN15WrenchMenuModel5BuildEbb
+ fun:_ZN15WrenchMenuModelC1EPN2ui19AcceleratorProviderEP7Browserbb
+ fun:_ZN17BrowserToolbarGtkC1EP7BrowserP16BrowserWindowGtk
+ fun:_ZN16BrowserWindowGtk11InitWidgetsEv
+ fun:_ZN16BrowserWindowGtk4InitEv
+ fun:_ZN13BrowserWindow19CreateBrowserWindowEP7Browser
+}
+{
+ bug_171722
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3net12_GLOBAL__N_120URLRequestFtpJobTest9AddSocketEPNS_13MockReadWriteILNS_17MockReadWriteTypeE0EEEmPNS2_ILS3_1EEEm
+}
+{
+ bug_172005
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7leveldb10VersionSet11LogAndApplyEPNS_11VersionEditEPNS_4port5MutexE
+ fun:_ZN7leveldb2DB4OpenERKNS_7OptionsERKSsPPS0_
+ fun:_ZN11dom_storage22SessionStorageDatabase9TryToOpenEPPN7leveldb2DBE
+ fun:_ZN11dom_storage22SessionStorageDatabase8LazyOpenEb
+ fun:_ZN11dom_storage22SessionStorageDatabase24ReadNamespacesAndOriginsEPSt3mapISsSt6vectorI4GURLSaIS3_EESt4lessISsESaISt4pairIKSsS5_EEE
+ fun:_ZN11dom_storage17DomStorageContext36FindUnusedNamespacesInCommitSequenceERKSt3setISsSt4lessISsESaISsEES7_
+}
+{
+ bug_172005b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7leveldb6DBImplC1ERKNS_7OptionsERKSs
+ fun:_ZN7leveldb2DB4OpenERKNS_7OptionsERKSsPPS0_
+ fun:_ZN11dom_storage22SessionStorageDatabase9TryToOpenEPPN7leveldb2DBE
+ fun:_ZN11dom_storage22SessionStorageDatabase8LazyOpenEb
+ fun:_ZN11dom_storage22SessionStorageDatabase24ReadNamespacesAndOriginsEPSt3mapISsSt6vectorI4GURLSaIS3_EESt4lessISsESaISt4pairIKSsS5_EEE
+ fun:_ZN11dom_storage17DomStorageContext36FindUnusedNamespacesInCommitSequenceERKSt3setISsSt4lessISsESaISsEES7_
+}
+{
+ bug_175823
+ Memcheck:Leak
+ ...
+ fun:_ZN18ValueStoreFrontend*
+}
+{
+ bug_176616_a
+ Memcheck:Uninitialized
+ fun:_ZN13WebTestRunner16WebTestProxyBase19didCreateDataSourceEPN5blink8WebFrameEPNS1_13WebDataSourceE
+ fun:_ZN13WebTestRunner12WebTestProxyI11WebViewHostP9TestShellE19didCreateDataSourceEPN5blink8WebFrameEPNS5_13WebDataSourceE
+ fun:_ZN5blink21FrameLoaderClientImpl20createDocumentLoaderERKN7blink15ResourceRequestERKNS1_14SubstituteDataE
+ fun:_ZN5blink11FrameLoader4initEv
+ fun:_ZN5blink5Frame4initEv
+ fun:_ZN5blink12WebFrameImpl21initializeAsMainFrameEPN7blink4PageE
+ fun:_ZN5blink11WebViewImpl19initializeMainFrameEPNS_14WebFrameClientE
+ fun:_ZN9TestShell15createNewWindowERKN5blink6WebURLEP16DRTDevToolsAgentPN13WebTestRunner17WebTestInterfacesE
+ fun:_ZN9TestShell16createMainWindowEv
+ fun:_ZN9TestShell10initializeEP25MockWebKitPlatformSupport
+}
+{
+ bug_176616_b
+ Memcheck:Uninitialized
+ fun:_ZN13WebTestRunner10TestRunner5resetEv
+ fun:_ZN13WebTestRunner14TestInterfaces8resetAllEv
+ fun:_ZN13WebTestRunner17WebTestInterfaces8resetAllEv
+ fun:_ZN9TestShell19resetTestControllerEv
+ fun:_ZL7runTestR9TestShellR10TestParamsRKSsb
+}
+{
+ bug_176619_a
+ Memcheck:Uninitialized
+ fun:_ZN3WTF6StringC1EPKt
+ fun:_ZN5blink12WebVTTParser22constructTreeFromTokenEPNS_8DocumentE
+ fun:_ZN5blink12WebVTTParser33createDocumentFragmentFromCueTextERKN3WTF6StringE
+ fun:_ZN5blink12TextTrackCue20createWebVTTNodeTreeEv
+ fun:_ZN5blink12TextTrackCue22createCueRenderingTreeEv
+ fun:_ZN5blink12TextTrackCue17updateDisplayTreeEf
+}
+{
+ bug_176619_b
+ Memcheck:Uninitialized
+ fun:_ZN5blink12WebVTTParser13collectDigitsERKN3WTF6StringEPj
+ fun:_ZN5blink12WebVTTParser16collectTimeStampERKN3WTF6StringEPj
+ fun:_ZN5blink12WebVTTParser22constructTreeFromTokenEPNS_8DocumentE
+ fun:_ZN5blink12WebVTTParser33createDocumentFragmentFromCueTextERKN3WTF6StringE
+ fun:_ZN5blink12TextTrackCue20createWebVTTNodeTreeEv
+ fun:_ZN5blink12TextTrackCue22createCueRenderingTreeEv
+ fun:_ZN5blink12TextTrackCue17updateDisplayTreeEf
+}
+{
+ bug_176621
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN13WebTestRunner10TestPlugin6createEPN5blink8WebFrameERKNS1_15WebPluginParamsEPNS_15WebTestDelegateE
+ fun:_ZN13WebTestRunner16WebTestProxyBase12createPluginEPN5blink8WebFrameERKNS1_15WebPluginParamsE
+ fun:_ZN13WebTestRunner12WebTestProxyI11WebViewHostP9TestShellE12createPluginEPN5blink8WebFrameERKNS5_15WebPluginParamsE
+ fun:_ZN5blink21FrameLoaderClientImpl12createPluginERKN7blink7IntSizeEPNS1_17HTMLPlugInElementERKNS1_4KURLERKN3WTF6VectorINSA_6String*
+ fun:_ZN5blink14SubframeLoader10loadPluginEPNS_22HTMLPlugInImageElementERKNS_4KURLERKN3WTF6StringERKNS6_6VectorIS7*
+}
+{
+ bug_176891a
+ Memcheck:Leak
+ fun:calloc
+ fun:nss_ZAlloc
+ fun:nssCryptokiObject_Create
+ fun:create_objects_from_handles
+ fun:find_objects
+ fun:find_objects_by_template
+ fun:nssToken_FindCertificateByEncodedCertificate
+ fun:PK11_FindCertFromDERCertItem
+ fun:_ZN24mozilla_security_manager12_GLOBAL__N_125nsPKCS12Blob_ImportHelper*
+}
+{
+ bug_176891b
+ Memcheck:Leak
+ ...
+ fun:nssPKIObject_Create
+ fun:nssTrustDomain_FindTrustForCertificate
+ fun:STAN_DeleteCertTrustMatchingSlot
+ fun:SEC_DeletePermCertificate
+}
+{
+ bug_177213
+ Memcheck:Leak
+ ...
+ fun:_ZN10extensionsL9SerializeERKSt6vectorINS_10UserScriptESaIS1_EE
+}
+{
+ bug_179758_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base54WeakPtrTest_NonOwnerThreadCanCopyAndAssignWeakPtr_Test8TestBodyEv
+}
+{
+ bug_179758_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base58WeakPtrTest_NonOwnerThreadCanCopyAndAssignWeakPtrBase_Test8TestBodyEv
+}
+{
+ bug_181680b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink11ScriptState10forContextEN2v86HandleINS1_7ContextEEE
+ fun:_ZN5blink17ScriptDebugServer18handleProgramBreakEN2v86HandleINS1_6ObjectEEENS2_INS1_5ValueEEENS2_INS1_5ArrayEEE
+}
+{
+ bug_195160_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeIiEE8allocateEmPKv
+ fun:_ZNSt8_Rb_treeIiiSt9_IdentityIiESt4lessIiESaIiEE11_M_get_nodeEv
+ fun:_ZNSt8_Rb_treeIiiSt9_IdentityIiESt4lessIiESaIiEE14_M_create_nodeERKi
+ fun:_ZNSt8_Rb_treeIiiSt9_IdentityIiESt4lessIiESaIiEE10_M_insert_EPKSt18_Rb_tree_node_baseS8_RKi
+ fun:_ZNSt8_Rb_treeIiiSt9_IdentityIiESt4lessIiESaIiEE16_M_insert_uniqueERKi
+ fun:_ZNSt3setIiSt4lessIiESaIiEE6insertERKi
+ fun:_ZN10extensions10URLMatcher14UpdateTriggersEv
+ fun:_ZN10extensions10URLMatcher28UpdateInternalDatastructuresEv
+ fun:_ZN10extensions10URLMatcher16AddConditionSetsERKSt6vectorI13scoped_refptrINS_22URLMatcherConditionSetEESaIS4_EE
+ fun:_ZN12_GLOBAL__N_113FilterBuilder5BuildEv
+ fun:_ZN12_GLOBAL__N_134LoadWhitelistsOnBlockingPoolThreadE12ScopedVectorI19ManagedModeSiteListE
+}
+{
+ bug_195160_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeIPN10extensions13StringPatternEEE8allocateEmPKv
+ fun:_ZNSt8_Rb_treeIPN10extensions13StringPatternES2_St9_IdentityIS2_ENS0_26URLMatcherConditionFactory27StringPatternPointerCompareESaIS2_EE11_M_get_nodeEv
+ fun:_ZNSt8_Rb_treeIPN10extensions13StringPatternES2_St9_IdentityIS2_ENS0_26URLMatcherConditionFactory27StringPatternPointerCompareESaIS2_EE14_M_create_nodeERKS2_
+ fun:_ZNSt8_Rb_treeIPN10extensions13StringPatternES2_St9_IdentityIS2_ENS0_26URLMatcherConditionFactory27StringPatternPointerCompareESaIS2_EE10_M_insert_EPKSt18_Rb_tree_node_baseSB_RKS2_
+ fun:_ZNSt8_Rb_treeIPN10extensions13StringPatternES2_St9_IdentityIS2_ENS0_26URLMatcherConditionFactory27StringPatternPointerCompareESaIS2_EE16_M_insert_uniqueERKS2_
+ fun:_ZNSt3setIPN10extensions13StringPatternENS0_26URLMatcherConditionFactory27StringPatternPointerCompareESaIS2_EE6insertERKS2_
+ fun:_ZN10extensions26URLMatcherConditionFactory15CreateConditionENS_19URLMatcherCondition9CriterionERKSs
+ fun:_ZN10extensions26URLMatcherConditionFactory35CreateHostSuffixPathPrefixConditionERKSsS2_
+ fun:_ZN6policy12URLBlacklist18CreateConditionSetEPN10extensions10URLMatcherEiRKSsS5_btS5_
+ fun:_ZN12_GLOBAL__N_113FilterBuilder10AddPatternERKSsi
+ fun:_ZN12_GLOBAL__N_113FilterBuilder11AddSiteListEP19ManagedModeSiteList
+ fun:_ZN12_GLOBAL__N_134LoadWhitelistsOnBlockingPoolThreadE12ScopedVectorI19ManagedModeSiteListE
+}
+{
+ bug_195160_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10TaskRunner16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_
+ fun:_ZN4base26PostTaskAndReplyWithResultI10scoped_ptrIN20ManagedModeURLFilter8ContentsENS_14DefaultDeleterIS3_EEES6_EEbPNS_10TaskRunnerERKN15tracked_objects8LocationERKNS_8CallbackIFT_vEEERKNSD_IFvT0_EEE
+ fun:_ZN20ManagedModeURLFilter14LoadWhitelistsE12ScopedVectorI19ManagedModeSiteListE
+}
+{
+ bug_222876
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN21WebDataServiceWrapperC1EP7Profile
+ fun:_ZNK21WebDataServiceFactory23BuildServiceInstanceForEPN7content14BrowserContextE
+ fun:_ZN33BrowserContextKeyedServiceFactory27GetServiceForBrowserContextEPN7content14BrowserContextEb
+ fun:_ZN21WebDataServiceFactory13GetForProfileEP7ProfileNS0_17ServiceAccessTypeE
+ ...
+ fun:_ZN12TokenService10InitializeEPKcP7Profile
+}
+{
+ bug_222883
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal15ScavengeVisitor15ScavengePointerEPPNS0_6Object*
+ fun:_ZN2v88internal15ScavengeVisitor13VisitPointersEPPNS0_6ObjectES4_
+ fun:_ZNK2v88internal13StandardFrame18IterateExpressionsEPNS0_13ObjectVisitorE
+ ...
+ fun:_ZN2v88internal4Heap8ScavengeEv
+ fun:_ZN2v88internal4Heap24PerformGarbageCollectionENS0_16GarbageCollector*
+}
+{
+ bug_225028
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN31SafeBrowsingDatabaseFactoryImpl26CreateSafeBrowsingDatabaseEbbbb
+ fun:_ZN20SafeBrowsingDatabase6CreateEbbbb
+ fun:_ZN27SafeBrowsingDatabaseManager11GetDatabaseEv
+}
+{
+ bug_226254
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base4BindIMN10extensions16UserScriptMaster14ScriptReloader*
+ fun:_ZN10extensions16UserScriptMaster14ScriptReloader9StartLoad*
+ fun:_ZN10extensions16UserScriptMaster9StartLoadEv
+ fun:_ZN10extensions16UserScriptMaster7ObserveEiRKN7content18NotificationSourceERKNS1_19NotificationDetailsE
+}
+{
+ bug_234845
+ Memcheck:Leak
+ fun:malloc
+ fun:PORT_Alloc_Util
+ fun:pk11_CreateSymKey
+ fun:PK11_KeyGenWithTemplate
+ fun:pk11_TokenKeyGenWithFlagsAndKeyType
+ fun:pk11_RawPBEKeyGenWithKeyType
+ fun:PK11_PBEKeyGen
+ fun:PK11_ExportEncryptedPrivKeyInfo
+ fun:_ZN6crypto12ECPrivateKey25ExportEncryptedPrivateKeyERKSsiPSt6vectorIhSaIhEE
+}
+{
+ bug_235584
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base4Bind*
+ fun:_ZN3net18SSLClientSocketNSS4Core21OnHandshakeIOCompleteEi
+ fun:_ZN3net18SSLClientSocketNSS4Core28OnGetDomainBoundCertCompleteEi
+}
+{
+ bug_236791
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3ash4test53FocusCyclerTest_CycleFocusThroughWindowWithPanes_Test8TestBodyEv
+}
+{
+ bug_239141
+ Memcheck:Leak
+ fun:malloc
+ ...
+ fun:_ZN3WTF9BitVector13OutOfLineBits6createEm
+ fun:_ZN3WTF9BitVector15resizeOutOfLineEm
+ fun:_ZN3WTF9BitVector10ensureSizeEm
+ fun:_ZN3WTF9BitVectorC*
+ fun:_ZN5blink10UseCounter17recordMeasurementENS0_7FeatureE
+}
+{
+ bug_242672
+ Memcheck:Leak
+ fun:malloc
+ ...
+ fun:_ZN3WTF9BitVector13OutOfLineBits6createEm
+ fun:_ZN3WTF9BitVector15resizeOutOfLineEm
+ fun:_ZN3WTF9BitVector10ensureSizeEm
+ fun:_ZN5blink10UseCounterC1Ev
+ fun:_ZN5blink4PageC1ERNS0_11PageClientsE
+}
+{
+ bug_245714
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content17WorkerServiceImplC1Ev
+ fun:_ZN22DefaultSingletonTraitsIN7content17WorkerServiceImplEE3NewEv
+ fun:_ZN9SingletonIN7content17WorkerServiceImplE22DefaultSingletonTraitsIS1_ES1_E3getEv
+ fun:_ZN7content17WorkerServiceImpl11GetInstanceEv
+ fun:_ZN7content19WorkerMessageFilter16OnChannelClosingEv
+ fun:_ZN3IPC12ChannelProxy7Context15OnChannelClosedEv
+}
+{
+ bug_245714b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content17WorkerServiceImplC1Ev
+ fun:_ZN22DefaultSingletonTraitsIN7content17WorkerServiceImplEE3NewEv
+ fun:_ZN9SingletonIN7content17WorkerServiceImplE22DefaultSingletonTraitsIS1_ES1_E3getEv
+ fun:_ZN7content17WorkerServiceImpl11GetInstanceEv
+ fun:_ZN7content22ResourceRequestDetailsC1EPKN3net10URLRequestEi
+ fun:_ZN7content26ResourceDispatcherHostImpl18DidReceiveResponseEPNS_14ResourceLoaderE
+ fun:_ZN7content14ResourceLoader23CompleteResponseStartedEv
+ fun:_ZN7content14ResourceLoader17OnResponseStartedEPN3net10URLRequestE
+ fun:_ZN3net10URLRequest21NotifyResponseStartedEv
+}
+{
+ bug_245714c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content17WorkerServiceImplC1Ev
+ fun:_ZN22DefaultSingletonTraitsIN7content17WorkerServiceImplEE3NewEv
+ fun:_ZN9SingletonIN7content17WorkerServiceImplE22DefaultSingletonTraitsIS1_ES1_E3getEv
+ fun:_ZN7content17WorkerServiceImpl11GetInstanceEv
+ fun:_ZN7content22ResourceRequestDetailsC1EPKN3net10URLRequestEi
+ fun:_ZN7content23ResourceRedirectDetailsC1EPKN3net10URLRequestEiRK4GURL
+ fun:_ZN7content26ResourceDispatcherHostImpl18DidReceiveRedirectEPNS_14ResourceLoaderERK4GURL
+ fun:_ZN7content14ResourceLoader18OnReceivedRedirectEPN3net10URLRequestERK4GURLPb
+ fun:_ZN3net10URLRequest22NotifyReceivedRedirectERK4GURLPb
+ fun:_ZN3net13URLRequestJob21NotifyHeadersCompleteEv
+ fun:_ZN3net17URLRequestHttpJob21NotifyHeadersCompleteEv
+ fun:_ZN3net17URLRequestHttpJob14SaveNextCookieEv
+ fun:_ZN3net17URLRequestHttpJob35SaveCookiesAndNotifyHeadersCompleteEi
+ fun:_ZN3net17URLRequestHttpJob16OnStartCompletedEi
+}
+{
+ bug_245828
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10TaskRunner16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_
+ fun:_ZN7content13BrowserThread16PostTaskAndReplyENS0_2IDERKN15tracked_objects8LocationERKN4base8CallbackIFvvEEESB_
+}
+{
+ bug_245866
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base23EnsureProcessTerminatedEi
+ fun:_ZN7content6Zygote17HandleReapRequestEiRK6Pickle14PickleIterator
+ fun:_ZN7content6Zygote24HandleRequestFromBrowserEi
+ fun:_ZN7content6Zygote15ProcessRequestsEv
+ fun:_ZN7content10ZygoteMainERKNS_18MainFunctionParamsEPNS_18ZygoteForkDelegateE
+ fun:_ZN7content9RunZygoteERKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+ fun:_ZN7content23RunNamedProcessTypeMainERKSsRKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl3RunEv
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+}
+{
+ bug_250529_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN14TestingProfile20CreateRequestContextEv
+ fun:_ZN12_GLOBAL__N_130ProfileSyncServiceTypedUrlTest5SetUpEv
+}
+{
+ bug_250529_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN14TestingProfile20CreateRequestContextEv
+ fun:_ZN30ProfileSyncServicePasswordTest5SetUpEv
+}
+{
+ bug_250533_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3net18HttpNetworkSessionC1ERKNS0_6ParamsE
+ fun:_ZN11jingle_glue26ProxyResolvingClientSocketC1EPN3net19ClientSocketFactoryERK13scoped_refptrINS1_23URLRequestContextGetterEERKNS1_9SSLConfigERKNS1_12HostPortPairE
+ fun:_ZN11jingle_glue23XmppClientSocketFactory27CreateTransportClientSocketERKN3net12HostPortPairE
+ fun:_ZN11jingle_glue17ChromeAsyncSocket7ConnectERKN9talk_base13SocketAddressE
+}
+{
+ bug_251004_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10TaskRunner16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_
+ fun:_ZN3net10FileStream7Context14CloseAndDeleteEv
+}
+{
+ bug_251034
+ Memcheck:Leak
+ ...
+ fun:_ZN3gpu5gles216ShaderTranslator4InitE12ShShaderType12ShShaderSpecPK18ShBuiltInResourcesNS0_25ShaderTranslatorInterface22GlslImplementationTypeENS7_27GlslBuiltInFunctionBehaviorE
+ fun:_ZN3gpu5gles221ShaderTranslatorCache13GetTranslatorE12ShShaderType12ShShaderSpecPK18ShBuiltInResourcesNS0_25ShaderTranslatorInterface22GlslImplementationTypeENS7_27GlslBuiltInFunctionBehaviorE
+ fun:_ZN3gpu5gles216GLES2DecoderImpl26InitializeShaderTranslatorEv
+ fun:_ZN3gpu5gles216GLES2DecoderImpl10InitializeERK13scoped_refptrIN3gfx9GLSurfaceEERKS2_INS3_9GLContextEEbRKNS3_4SizeERKNS0_18DisallowedFeaturesEPKcRKSt6vectorIiSaIiEE
+ fun:_ZN3gpu22InProcessCommandBuffer21InitializeOnGpuThreadEbmRKN3gfx4SizeEPKcRKSt6vectorIiSaIiEENS1_13GpuPreferenceE
+}
+{
+ bug_252054
+ Memcheck:Unaddressable
+ fun:_ZNK7blink32PlatformSpeechSynthesisUtterance6clientEv
+ fun:_ZN5blink15SpeechSynthesis17didFinishSpeakingEN3WTF10PassRefPtrINS_32PlatformSpeechSynthesisUtteranceEEE
+ fun:_ZN5blink29PlatformSpeechSynthesizerMock16speakingFinishedEPNS_5TimerIS0_EE
+ fun:_ZN5blink5TimerINS_29PlatformSpeechSynthesizerMockEE5firedEv
+ fun:_ZN5blink12ThreadTimers24sharedTimerFiredInternalEv
+ fun:_ZN5blink12ThreadTimers16sharedTimerFiredEv
+ fun:_ZN11webkit_glue25WebKitPlatformSupportImpl9DoTimeoutEv
+}
+{
+ bug_252036
+ Memcheck:Uninitialized
+ fun:_ZN2cc9Scheduler27SetupNextBeginFrameIfNeededEv
+ fun:_ZN2cc9Scheduler23ProcessScheduledActionsEv
+}
+{
+ bug_252241_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7content20WebKitTestController20PrepareForLayoutTestERK4GURLRKN4base8FilePathEbRKSs
+ fun:_Z16ShellBrowserMainRKN7content18MainFunctionParams*
+ fun:_ZN7content17ShellMainDelegate10RunProcessERKSsRKNS_18MainFunctionParamsE
+ fun:_ZN7content23RunNamedProcessTypeMainERKSsRKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl3RunEv
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+}
+{
+ bug_252241_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content19ContentMainDelegate26CreateContentUtilityClientEv
+ fun:_ZN7content24ContentClientInitializer3SetERKSsPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl10InitializeEiPPKcPNS_19ContentMainDelegateE
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+}
+{
+ bug_252641_a
+ Memcheck:Uninitialized
+ fun:pthread_rwlock_init$UNIX2003
+ fun:_ZN3re25MutexC2Ev
+ fun:_ZN3re25MutexC1Ev
+ ...
+ fun:_ZN11leveldb_env19ParseMethodAndErrorEPKcPNS_8MethodIDEPi
+}
+{
+ bug_252641_b
+ Memcheck:Uninitialized
+ fun:pthread_rwlock_init$UNIX2003
+ fun:_ZN3re25MutexC2Ev
+ fun:_ZN3re25MutexC1Ev
+ ...
+ fun:_ZN3gpu12_GLOBAL__N_114StringMismatchERKSsS2_
+}
+{
+ bug_258132a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5ppapi5proxy15PPP_Class_Proxy19CreateProxiedObjectEPK18PPB_Var_DeprecatedPNS0_10DispatcherEill
+ fun:_ZN5ppapi5proxy24PPB_Var_Deprecated_Proxy27OnMsgCreateObjectDeprecatedEillNS0_24SerializedVarReturnValueE
+}
+{
+ bug_258132b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5ppapi5proxy26PluginProxyMultiThreadTest7RunTestEv
+ fun:_ZN5ppapi*ThreadAwareCallback*Test_*
+}
+{
+ bug_259357d
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN3gpu5gles239ShaderTranslatorTest_OptionsString_Test8TestBodyEv
+}
+{
+ bug_259357f
+ Memcheck:Uninitialized
+ fun:_ZNK3gpu12AsyncAPIMock6IsArgsclEPKv
+ fun:_ZNK7testing8internal12TrulyMatcherIN3gpu12AsyncAPIMock6IsArgsEE15MatchAndExplainIPKvEEbRT_PNS_19MatchResultListenerE
+ fun:_ZNK7testing18PolymorphicMatcherINS_8internal12TrulyMatcherIN3gpu12AsyncAPIMock6IsArgsEEEE15MonomorphicImplIPKvE15MatchAndExplainESA_PNS_19MatchResultListenerE
+ fun:_ZNK7testing8internal11MatcherBaseIPKvE15MatchAndExplainES3_PNS_19MatchResultListenerE
+ fun:_ZNK7testing8internal11MatcherBaseIPKvE7MatchesES3_
+ fun:_ZN7testing8internal11TuplePrefixILm3EE7MatchesINSt3tr15tupleIINS_7MatcherIjEES7_NS6_IPKvEEEEENS5_IIjjS9_EEEEEbRKT_RKT0_
+ fun:_ZN7testing8internal12TupleMatchesINSt3tr15tupleIINS_7MatcherIjEES5_NS4_IPKvEEEEENS3_IIjjS7_EEEEEbRKT_RKT0_
+ fun:_ZNK7testing8internal16TypedExpectationIFN3gpu5error5ErrorEjjPKvEE7MatchesERKNSt3tr15tupleIIjjS6_EEE
+ fun:_ZNK7testing8internal16TypedExpectationIFN3gpu5error5ErrorEjjPKvEE21ShouldHandleArgumentsERKNSt3tr15tupleIIjjS6_EEE
+ fun:_ZNK7testing8internal18FunctionMockerBaseIFN3gpu5error5ErrorEjjPKvEE29FindMatchingExpectationLockedERKNSt3tr15tupleIIjjS6_EEE
+ fun:_ZN7testing8internal18FunctionMockerBaseIFN3gpu5error5ErrorEjjPKvEE30UntypedFindMatchingExpectationES6_PS6_PbPSoSB_
+ fun:_ZN7testing8internal25UntypedFunctionMockerBase17UntypedInvokeWithEPKv
+ fun:_ZN7testing8internal18FunctionMockerBaseIFN3gpu5error5ErrorEjjPKvEE10InvokeWithERKNSt3tr15tupleIIjjS6_EEE
+ fun:_ZN7testing8internal14FunctionMockerIFN3gpu5error5ErrorEjjPKvEE6InvokeEjjS6_
+ fun:_ZN3gpu12AsyncAPIMock9DoCommandEjjPKv
+ fun:_ZN3gpu13CommandParser14ProcessCommandEv
+ fun:_ZN3gpu12GpuScheduler10PutChangedEv
+}
+{
+ bug_259789b
+ Memcheck:Uninitialized
+ fun:_ZN5blink12_GLOBAL__N_116adjustAttributesERKNS_17GraphicsContext3D10AttributesEPNS_8SettingsE
+ fun:_ZN5blink21WebGLRenderingContext6createEPNS_17HTMLCanvasElementEPNS_22WebGLContextAttributesE
+ fun:_ZN5blink17HTMLCanvasElement10getContextERKN3WTF6StringEPNS_23CanvasContextAttributesE
+}
+{
+ bug_273398
+ Memcheck:Leak
+ ...
+ fun:_ZN6Pickle6ResizeEm
+ fun:_ZN6PickleC1Ev
+ fun:_ZN7content14ZygoteHostImpl20GetTerminationStatusEibPi
+ fun:_ZN7content20ChildProcessLauncher25GetChildTerminationStatusEbPi
+}
+{
+ bug_288804
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeISt4pairIKN3net12HostPortPairESt3mapINS3_15SpdySettingsIdsES2_INS3_17SpdySettingsFlagsEjESt4lessIS7_ESaIS2_IKS7_S9_EEEEEE8allocateEmPKv
+ fun:_ZNSt8_Rb_treeIN3net12HostPortPairESt4pairIKS1_St3mapINS0_15SpdySettingsIdsES2_INS0_17SpdySettingsFlagsEjESt4lessIS5_ESaIS2_IKS5_S7_EEEESt10_Select1stISE_ES8_IS1_ESaISE_EE11_M_get_nodeEv
+ fun:_ZNSt8_Rb_treeIN3net12HostPortPairESt4pairIKS1_St3mapINS0_15SpdySettingsIdsES2_INS0_17SpdySettingsFlagsEjESt4lessIS5_ESaIS2_IKS5_S7_EEEESt10_Select1stISE_ES8_IS1_ESaISE_EE14_M_create_nodeERKSE_
+ fun:_ZNSt8_Rb_treeIN3net12HostPortPairESt4pairIKS1_St3mapINS0_15SpdySettingsIdsES2_INS0_17SpdySettingsFlagsEjESt4lessIS5_ESaIS2_IKS5_S7_EEEESt10_Select1stISE_ES8_IS1_ESaISE_EE13_M_clone_nodeEPKSt13_Rb_tree_nodeISE_E
+ fun:_ZNSt8_Rb_treeIN3net12HostPortPairESt4pairIKS1_St3mapINS0_15SpdySettingsIdsES2_INS0_17SpdySettingsFlagsEjESt4lessIS5_ESaIS2_IKS5_S7_EEEESt10_Select1stISE_ES8_IS1_ESaISE_EE7_M_copyEPKSt13_Rb_tree_nodeISE_EPSL_
+ fun:_ZNSt8_Rb_treeIN3net12HostPortPairESt4pairIKS1_St3mapINS0_15SpdySettingsIdsES2_INS0_17SpdySettingsFlagsEjESt4lessIS5_ESaIS2_IKS5_S7_EEEESt10_Select1stISE_ES8_IS1_ESaISE_EEaSERKSJ_
+ fun:_ZNSt3mapIN3net12HostPortPairES_INS0_15SpdySettingsIdsESt4pairINS0_17SpdySettingsFlagsEjESt4lessIS2_ESaIS3_IKS2_S5_EEES6_IS1_ESaIS3_IKS1_SB_EEEaSERKSG_
+ fun:_ZN18chrome_browser_net27HttpServerPropertiesManager24UpdatePrefsFromCacheOnIOERKN4base8CallbackIFvvEEE
+ fun:_ZN18chrome_browser_net27HttpServerPropertiesManager24UpdatePrefsFromCacheOnIOEv
+}
+{
+ bug_290407
+ Memcheck:Leak
+ fun:calloc
+ fun:_swrast_new_soft_renderbuffer
+ fun:_mesa_BindRenderbufferEXT
+ fun:shared_dispatch_stub_939
+ fun:_ZN3gfx9GLApiBase23glBindRenderbufferEXTFnEjj
+ fun:_ZN3gpu5gles216GLES2DecoderImpl18DoBindRenderbufferEjj
+ fun:_ZN3gpu5gles216GLES2DecoderImpl22HandleBindRenderbufferEjRKNS0_4cmds16BindRenderbufferE
+ fun:_ZN3gpu5gles216GLES2DecoderImpl9DoCommandEjjPKv
+ fun:_ZN3gpu13CommandParser14ProcessCommandEv
+}
+{
+ bug_293024_b
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF24ThreadSafeRefCountedBasenwEm
+ fun:_ZN5blink12_GLOBAL__N_131AllowFileSystemMainThreadBridge6createEPN7blink17WorkerGlobalScopeEPNS_13WebWorkerBaseERKN3WTF6StringE
+ fun:_ZN5blink22WorkerFileSystemClient15allowFileSystem*
+ ...
+ fun:_ZN2v88internal25FunctionCallbackArguments4CallEPFvRKNS_20FunctionCallbackInfoINS_5ValueEEEE
+}
+{
+ bug_298143
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink25TypeConversionsV8Internal*AttributeGetterE*
+}
+{
+ bug_298788
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN10extensions19TestExtensionSystem22CreateExtensionServiceEPKN4base11CommandLineERKNS1_8FilePathEb
+ fun:_ZN10extensions12_GLOBAL__N_130ExtensionActionIconFactoryTest5SetUpEv
+}
+{
+ bug_299804
+ Memcheck:Uninitialized
+ fun:_ZN24GrConfigConversionEffect30TestForPreservingPMConversionsEP9GrContextPNS_12PMConversionES3_
+ fun:_ZN12_GLOBAL__N_119test_pm_conversionsEP9GrContextPiS2_
+ fun:_ZN9GrContext19createPMToUPMEffectEP9GrTexturebRK8SkMatrix
+ fun:_ZN9GrContext22readRenderTargetPixelsEP14GrRenderTargetiiii13GrPixelConfigPvmj
+ fun:_ZN11SkGpuDevice12onReadPixelsE*
+ fun:_ZN12SkBaseDevice10readPixelsE*
+ fun:_ZN8SkCanvas10readPixelsE*
+ fun:_ZN*DeferredDevice12onReadPixelsE*
+ fun:_ZN12SkBaseDevice10readPixelsE*
+ fun:_ZN8SkCanvas10readPixelsE*
+ fun:_ZN5blink15GraphicsContext10readPixelsE*
+ ...
+ fun:_ZN*blink24CanvasRenderingContext2D12getImageDataE*
+ ...
+ fun:_ZN5blink34CanvasRenderingContext2DV8InternalL18getImageDataMethodERKN2v820FunctionCallbackInfoINS1_5ValueEEE
+ fun:_ZN5blink34CanvasRenderingContext2DV8InternalL26getImageDataMethodCallbackERKN2v820FunctionCallbackInfoINS1_5ValueEEE
+}
+{
+ bug_309477
+ Memcheck:Uninitialized
+ fun:_ZN13WebTestRunner11EventSender5resetEv
+ fun:_ZN13WebTestRunner14TestInterfaces26resetTestHelperControllersEv
+ fun:_ZN13WebTestRunner14TestInterfaces8resetAllEv
+ ...
+ fun:_ZN7content26ShellRenderProcessObserver17WebKitInitializedEv
+ fun:_ZN7content16RenderThreadImpl23EnsureWebKitInitializedEv
+ fun:_ZN7content16RenderThreadImpl15OnCreateNewViewERK18ViewMsg_New_Params
+}
+{
+ bug_317166
+ Memcheck:Leak
+ fun:malloc
+ fun:_dl_close_worker
+ fun:_dl_close
+ fun:_dl_catch_error
+ fun:_dlerror_run
+ fun:dlclose
+ obj:/usr/lib/x86_64-linux-gnu/libasound.so.2.0.0
+ fun:snd_config_searcha_hooks
+ fun:snd_config_searchva_hooks
+ obj:/usr/lib/x86_64-linux-gnu/libasound.so.2.0.0
+ fun:snd_config_search_definition
+ obj:/usr/lib/x86_64-linux-gnu/libasound.so.2.0.0
+ fun:_ZN5media11AlsaWrapper7PcmOpenEPP8_snd_pcmPKc15_snd_pcm_streami
+ fun:_ZN9alsa_utilL10OpenDeviceEPN5media11AlsaWrapperEPKc15_snd_pcm_streamii15_snd_pcm_formati
+ fun:_ZN9alsa_util18OpenPlaybackDeviceEPN5media11AlsaWrapperEPKcii15_snd_pcm_formati
+ fun:_ZN5media19AlsaPcmOutputStream16AutoSelectDeviceEj
+ fun:_ZN5media19AlsaPcmOutputStream4OpenEv
+ fun:_ZN5media25AudioOutputDispatcherImpl19CreateAndOpenStreamEv
+ fun:_ZN5media25AudioOutputDispatcherImpl10OpenStreamEv
+ fun:_ZN5media20AudioOutputResampler10OpenStreamEv
+ fun:_ZN5media16AudioOutputProxy4OpenEv
+ fun:_ZN5media18AudioStreamHandler20AudioStreamContainer4PlayEv
+}
+{
+ bug_318221
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base23EnsureProcessTerminatedEi
+}
+{
+ bug_321976
+ Memcheck:Leak
+ ...
+ fun:nssList_Create
+ fun:nssTrustDomain_UpdateCachedTokenCerts
+}
+{
+ bug_331063
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink11RenderImage12imageChangedEPvPKNS_7IntRectE
+ fun:_ZN5blink12RenderObject12imageChangedEPNS_13ImageResourceEPKNS_7IntRectE
+ fun:_ZN5blink13ImageResource12didAddClientEPNS_14ResourceClientE
+ fun:_ZN5blink8Resource9addClientEPNS_14ResourceClientE
+ fun:_ZN5blink19RenderImageResource16setImageResourceEPNS_13ImageResourceE
+ fun:_ZN5blink30PasswordGeneratorButtonElement11updateImageEv
+ fun:_ZN5blink30PasswordGeneratorButtonElement6attachERKNS_4Node13AttachContextE
+ fun:_ZN5blink13ContainerNode14attachChildrenERKNS_4Node13AttachContextE
+}
+{
+ bug_331925
+ Memcheck:Leak
+ ...
+ fun:_ZN3net27TestURLRequestContextGetter20GetURLRequestContextEv
+ fun:_ZN3net14URLFetcherCore30StartURLRequestWhenAppropriateEv
+ fun:_ZN3net14URLFetcherCore19DidInitializeWriterEi
+ fun:_ZN3net14URLFetcherCore15StartOnIOThreadEv
+}
+{
+ bug_332328
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN5blink14XMLHttpRequestnwEm
+ fun:_ZN5blink14XMLHttpRequest6createEPNS_16ExecutionContextEN3WTF10PassRefPtrINS_14SecurityOriginEEE
+ fun:_ZN5blink16V8XMLHttpRequest17constructorCustomERKN2v820FunctionCallbackInfoINS1_5ValueEEE
+ fun:_ZN5blink16V8XMLHttpRequest19constructorCallbackERKN2v820FunctionCallbackInfoINS1_5ValueEEE
+ fun:_ZN2v88internal25FunctionCallbackArguments4CallEPFvRKNS_20FunctionCallbackInfoINS_5ValueEEEE
+}
+{
+ bug_332330
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN5blink8ResourcenwEm
+ fun:_ZN5blinkL14createResourceENS_8Resource4TypeERKNS_15ResourceRequestERKN3WTF6StringE
+ fun:_ZN5blink15ResourceFetcher12loadResourceENS_8Resource4TypeERNS_12FetchRequestERKN3WTF6StringE
+ fun:_ZN5blink15ResourceFetcher15requestResourceENS_8Resource4TypeERNS_12FetchRequestE
+ fun:_ZN5blink15ResourceFetcher16fetchRawResourceERNS_12FetchRequestE
+ fun:_ZN5blink24DocumentThreadableLoader11loadRequestERKNS_15ResourceRequestENS_19SecurityCheckPolicyE
+}
+{
+ bug_340952
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink15DOMWrapperWorldC2Eii
+ fun:_ZN5blink15DOMWrapperWorldC1Eii
+ fun:_ZN5blink15DOMWrapperWorld6createEii
+ fun:_ZN5blink15DOMWrapperWorld9mainWorldEv
+ ...
+ fun:_ZN7content22BufferedDataSourceTestC2Ev
+}
+{
+ bug_340752
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink4Heap19checkAndMarkPointerEPNS_7VisitorEPh
+ fun:_ZN5blink11ThreadState10visitStackEPNS_7VisitorE
+ ...
+ fun:_ZN5blink4Heap14collectGarbageENS_11ThreadState10StackState*
+}
+{
+ bug_342591
+ Memcheck:Param
+ write(buf)
+ obj:*libpthread*
+ fun:_ZN3IPC7Channel11ChannelImpl23ProcessOutgoingMessagesEv
+ fun:_ZN3IPC7Channel11ChannelImpl4SendEPNS_7MessageE
+ fun:_ZN3IPC7Channel4SendEPNS_7MessageE
+ fun:_ZN3IPC12ChannelProxy7Context13OnSendMessageE10scoped_ptrINS_7MessageEN4base14DefaultDeleterIS3_EEE
+}
+{
+ bug_345432
+ Memcheck:Leak
+ ...
+ fun:_ZN7content14GpuChannelHost7ConnectERKN3IPC13ChannelHandle*
+ fun:_ZN7content14GpuChannelHost6CreateEPNS_21GpuChannelHostFactoryERKN3gpu7GPUInfoERKN3IPC13ChannelHandle*
+ fun:_ZN7content28BrowserGpuChannelHostFactory21GpuChannelEstablishedEv
+ fun:_ZN7content28BrowserGpuChannelHostFactory16EstablishRequest12FinishOnMainEv
+ fun:_ZN7content28BrowserGpuChannelHostFactory16EstablishRequest4WaitEv
+}
+{
+ bug_346336_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content16SiteInstanceImpl10GetProcessEv
+ fun:_ZN7content22RenderFrameHostManager21CreateRenderFrameHostEPNS_12SiteInstanceEiibb
+ fun:_ZN7content22RenderFrameHostManager4InitEPNS_14BrowserContextEPNS_12SiteInstanceEii
+ fun:_ZN7content15WebContentsImpl4InitERKNS_11WebContents12CreateParamsE
+ fun:_ZN7content15WebContentsImpl16CreateWithOpenerERKNS_11WebContents12CreateParamsEPS0_
+ fun:_ZN7content11WebContents6CreateERKNS0_12CreateParamsE
+ fun:_ZN7content5Shell15CreateNewWindowEPNS_14BrowserContextERK4GURLPNS_12SiteInstanceEiRKN3gfx4SizeE
+ fun:_ZN7content20WebKitTestController20PrepareForLayoutTestERK4GURLRKN4base8FilePathEbRKSs
+ fun:_ZN12_GLOBAL__N_110RunOneTestERKSsPbRK10scoped_ptrIN7content17BrowserMainRunnerEN4base14DefaultDeleterIS5_EEE
+ fun:_Z16ShellBrowserMainRKN7content18MainFunctionParamsERK10scoped_ptrINS_17BrowserMainRunnerEN4base14DefaultDeleterIS4_EEE
+ fun:_ZN7content17ShellMainDelegate10RunProcessERKSsRKNS_18MainFunctionParamsE
+ fun:_ZN7content23RunNamedProcessTypeMainERKSsRKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl3RunEv
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+}
+{
+ bug_347683
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK13LoginDatabase9GetLoginsERKN8autofill12PasswordFormEPSt6vectorIPS1_SaIS5_EE
+ fun:_ZN50LoginDatabaseTest_UpdateIncompleteCredentials_Test8TestBodyEv
+}
+{
+ bug_347967
+ Memcheck:Uninitialized
+ fun:unpack_RGB*888
+ fun:_mesa_unpack_rgba_row
+ fun:slow_read_rgba_pixels
+ fun:read_rgba_pixels
+ fun:_mesa_readpixels
+ ...
+ fun:shared_dispatch_stub_*
+ ...
+ fun:*gpu*gles*GLES2DecoderImpl*DoCommand*
+ fun:*gpu*CommandParser*ProcessCommand*
+ fun:*gpu*GpuScheduler*PutChanged*
+}
+{
+ bug_348863
+ Memcheck:Unaddressable
+ fun:_ZNK7blink32PlatformSpeechSynthesisUtterance6clientEv
+ fun:_ZN5blink15SpeechSynthesis17didFinishSpeakingEN3WTF10PassRefPtrINS_32PlatformSpeechSynthesisUtteranceEEE
+ fun:_ZN5blink29PlatformSpeechSynthesizerMock16speakingFinishedEPNS_5TimerIS0_EE
+ fun:_ZN5blink5TimerINS_29PlatformSpeechSynthesizerMockEE5firedEv
+ fun:_ZN5blink12ThreadTimers24sharedTimerFiredInternalEv
+ fun:_ZN5blink12ThreadTimers16sharedTimerFiredEv
+ fun:_ZN7content17BlinkPlatformImpl9DoTimeoutEv
+}
+{
+ bug_350809
+ Memcheck:Uninitialized
+ fun:_ZN5blink23ReplaceSelectionCommand7doApplyEv
+ fun:_ZN5blink20CompositeEditCommand5applyEv
+ fun:_ZN5blink6Editor28replaceSelectionWithFragmentEN3WTF10PassRefPtrINS_16DocumentFragmentEEEbbb
+ fun:_ZN5blink6Editor24replaceSelectionWithTextERKN3WTF6StringEbb
+}
+{
+ bug_361594
+ Memcheck:Uninitialized
+ ...
+ fun:*SkA8_Shader_Blitter*blitH*
+ ...
+ fun:*content*ScreenshotData*EncodeOnWorker*
+}
+{
+ bug_363819
+ Memcheck:Uninitialized
+ fun:strlen
+ fun:*
+ fun:_ZN3net12_GLOBAL__N_114TestHttpClient4ReadEPSs
+ fun:_ZN3net*HttpServerTest*
+}
+{
+ bug_364274
+ Memcheck:Uninitialized
+ fun:_ZN5blink21RenderLayerCompositor14updateIfNeededEv
+}
+{
+ bug_364724
+ Memcheck:Param
+ write(buf)
+ obj:/lib/x86_64-linux-gnu/libpthread-2.15.so
+ fun:_ZN3IPC12ChannelPosix23ProcessOutgoingMessagesEv
+ fun:_ZN3IPC12ChannelPosix29OnFileCanWriteWithoutBlockingEi
+ ...
+ fun:_ZN4base19MessagePumpLibevent21FileDescriptorWatcher29OnFileCanWriteWithoutBlockingEiPS0_
+ ...
+ fun:event_process_active
+ fun:event_base_loop
+}
+{
+ bug_364724b
+ Memcheck:Uninitialized
+ fun:_ZN4base17MD5DigestToBase16ERKNS_9MD5DigestE
+ fun:_ZN7content16WebKitTestRunner17CaptureDumpPixelsERK8SkBitmap
+}
+{
+ bug_364724c
+ Memcheck:Param
+ write(buf)
+ obj:/lib/x86_64-linux-gnu/libpthread-2.15.so
+ fun:_ZN3IPC12ChannelPosix23ProcessOutgoingMessagesEv
+ fun:_ZN3IPC12ChannelPosix4SendEPNS_7MessageE
+ fun:_ZN3IPC12ChannelProxy7Context13OnSendMessageE10scoped_ptrINS_7MessageEN4base14DefaultDeleterIS3_EEE
+}
+{
+ bug_364821
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF10RefCountedIN5blink11ScriptStateEEnwEm
+ fun:_ZN5blink11ScriptState6createEN2v86HandleINS1_7ContextEEEN3WTF10PassRefPtrINS_15DOMWrapperWorldEEE
+ ...
+ fun:_ZN3WTF15FunctionWrapperIMN5blink12WorkerThreadEFvvEEclEPS2_
+}
+{
+ bug_365258
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN5blink8ResourcenwEm
+ fun:_ZN5blinkL14createResourceENS_8Resource4TypeERKNS_15ResourceRequestERKN3WTF6StringE
+ fun:_ZN5blink15ResourceFetcher18revalidateResourceERKNS_12FetchRequestEPNS_8ResourceE
+ fun:_ZN5blink15ResourceFetcher15requestResourceENS_8Resource4TypeERNS_12FetchRequestE
+ fun:_ZN5blink15ResourceFetcher11fetchScriptERNS_12FetchRequestE
+ fun:_ZN5blink12ScriptLoader11fetchScriptERKN3WTF6StringE
+ fun:_ZN5blink12ScriptLoader13prepareScriptERKN3WTF12TextPositionENS0_17LegacyTypeSupportE
+ fun:_ZN5blink16HTMLScriptRunner9runScriptEPNS_7ElementERKN3WTF12TextPositionE
+ fun:_ZN5blink16HTMLScriptRunner7executeEN3WTF10PassRefPtrINS_7ElementEEERKNS1_12TextPositionE
+ fun:_ZN5blink18HTMLDocumentParser30runScriptsForPausedTreeBuilderEv
+ fun:_ZN5blink18HTMLDocumentParser38processParsedChunkFromBackgroundParserEN3WTF10PassOwnPtrINS0_11ParsedChunkEEE
+ fun:_ZN5blink18HTMLDocumentParser23pumpPendingSpeculationsEv
+ fun:_ZN5blink18HTMLDocumentParser41didReceiveParsedChunkFromBackgroundParserEN3WTF10PassOwnPtrINS0_11ParsedChunkEEE
+ fun:_ZN3WTF15FunctionWrapperIMN7blink18HTMLDocumentParserEFvNS_10PassOwnPtrINS2_11ParsedChunkEEEEEclERKNS_7WeakPtrIS2_EES5_
+ fun:_ZN3WTF17BoundFunctionImplINS_15FunctionWrapperIMN7blink18HTMLDocumentParserEFvNS_10PassOwnPtrINS3_11ParsedChunkEEEEEEFvNS_7WeakPtrIS3_EES6_EEclEv
+ fun:_ZNK3WTF8FunctionIFvvEEclEv
+ fun:_ZN3WTFL18callFunctionObjectEPv
+}
+{
+ bug_365259
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF10RefCountedIN7blink20IDBDatabaseCallbacksEEnwEm
+ fun:_ZN5blink20IDBDatabaseCallbacks6createEv
+ fun:_ZN5blink10IDBFactory12openInternalEPNS_16ExecutionContextERKN3WTF6StringElRNS_14ExceptionStateE
+ fun:_ZN5blink10IDBFactory4openEPNS_16ExecutionContextERKN3WTF6StringERNS_14ExceptionStateE
+ fun:_ZN5blink20IDBFactoryV8InternalL10openMethodERKN2v820FunctionCallbackInfoINS1_5ValueEEE
+ fun:_ZN5blink20IDBFactoryV8InternalL18openMethodCallbackERKN2v820FunctionCallbackInfoINS1_5ValueEEE
+}
+{
+ bug_367809_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4mojo6common13HandleWatcher5StartERKNS_6HandleEjmRKN4base8CallbackIFviEEE
+ fun:_ZN4mojo8internal12_GLOBAL__N_19AsyncWaitEP15MojoAsyncWaiterjjmPFvPviES4_
+ fun:_ZN4mojo8internal9Connector14WaitToReadMoreEv
+ fun:_ZN4mojo8internal9ConnectorC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo8internal6RouterC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5StateC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5resetENS_16ScopedHandleBaseINS_15InterfaceHandleIS1_EEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN7content19MojoApplicationHost4InitEv
+}
+{
+ bug_367809_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4mojo8internal12_GLOBAL__N_19AsyncWaitEP15MojoAsyncWaiterjjmPFvPviES4_
+ fun:_ZN4mojo8internal9Connector14WaitToReadMoreEv
+ fun:_ZN4mojo8internal9ConnectorC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo8internal6RouterC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5StateC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5resetENS_16ScopedHandleBaseINS_15InterfaceHandleIS1_EEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN7content19MojoApplicationHost4InitEv
+}
+{
+ bug_367809_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4mojo8internal10SharedDataIPNS0_6RouterEEC1ERKS3_
+ fun:_ZN4mojo8internal6RouterC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5StateC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5resetENS_16ScopedHandleBaseINS_15InterfaceHandleIS1_EEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN7content19MojoApplicationHost4InitEv
+}
+{
+ bug_367809_d
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7content21RenderProcessHostImpl4InitEv
+ ...
+ fun:_ZN7content5Shell7LoadURLERK4GURL
+ fun:_ZN7content20WebKitTestController20PrepareForLayoutTestERK4GURLRKN4base8FilePathEbRKSs
+ fun:_ZN12_GLOBAL__N_110RunOneTestERKSsPbRK10scoped_ptrIN7content17BrowserMainRunnerEN4base14DefaultDeleterIS5_EEE
+}
+{
+ bug_369843
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content27ServiceWorkerContextWrapper12InitInternalERKN4base8FilePathEPNS1_19SequencedTaskRunnerEPN5quota17QuotaManagerProxyE
+}
+{
+ bug_370206
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF16DefaultAllocator15backingAllocateEm
+ fun:_ZN3WTF16DefaultAllocator13backingMallocIPN7blink14InlineIteratorEvEET_m
+ ...
+ fun:_ZN3WTF17HashMapTranslatorINS_18HashMapValueTraitsINS_10HashTraitsIPN7blink7BidiRun*
+}
+{
+ bug_371844
+ Memcheck:Uninitialized
+ fun:bcmp
+ fun:_ZNK7content15GamepadProvider8PadState5MatchERKN5blink10WebGamepadE
+ fun:_ZN7content15GamepadProvider6DoPollEv
+}
+{
+ bug_371860
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN8feedback16FeedbackDataTestC1Ev
+ fun:_ZN8feedback*FeedbackDataTest*
+ fun:_ZN7testing8internal15TestFactoryImplIN8feedback*
+}
+{
+ bug_372487_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN4mojo10BindToPipeIN7content19MojoApplicationHost9ShellImplEEEPT_S5_NS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN7content19MojoApplicationHost4InitEv
+ fun:_ZN7content21RenderProcessHostImpl4InitEv
+ fun:_ZN7content18RenderViewHostImpl16CreateRenderViewERKSbItN4base20string16_char_traitsESaItEEiib
+ fun:_ZN7content15WebContentsImpl32CreateRenderViewForRenderManagerEPNS_14RenderViewHostEiPNS_26CrossProcessFrameConnectorE
+ fun:_ZN7content22RenderFrameHostManager14InitRenderViewEPNS_14RenderViewHostEi
+ fun:_ZN7content22RenderFrameHostManager8NavigateERKNS_19NavigationEntryImplE
+ fun:_ZN7content13NavigatorImpl15NavigateToEntryEPNS_19RenderFrameHostImplERKNS_19NavigationEntryImplENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content13NavigatorImpl22NavigateToPendingEntryEPNS_19RenderFrameHostImplENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content15WebContentsImpl22NavigateToPendingEntryENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content24NavigationControllerImpl22NavigateToPendingEntryENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content24NavigationControllerImpl9LoadEntryEPNS_19NavigationEntryImplE
+ fun:_ZN7content24NavigationControllerImpl17LoadURLWithParamsERKNS_20NavigationController13LoadURLParamsE
+ fun:_ZN7content5Shell15LoadURLForFrameERK4GURLRKSs
+ fun:_ZN7content5Shell7LoadURLERK4GURL
+ fun:_ZN7content20WebKitTestController20PrepareForLayoutTestERK4GURLRKN4base8FilePathEbRKSs
+ fun:_ZN12_GLOBAL__N_110RunOneTestERKSsPbRK10scoped_ptrIN7content17BrowserMainRunnerEN4base14DefaultDeleterIS5_EEE
+ fun:_Z16ShellBrowserMainRKN7content18MainFunctionParamsERK10scoped_ptrINS_17BrowserMainRunnerEN4base14DefaultDeleterIS4_EEE
+ fun:_ZN7content17ShellMainDelegate10RunProcessERKSsRKNS_18MainFunctionParamsE
+ fun:_ZN7content23RunNamedProcessTypeMainERKSsRKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+}
+{
+ bug_372487_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN4mojo8internal6RouterC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo8internal18InterfaceImplStateINS_5ShellEE4BindENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo10BindToPipeIN7content19MojoApplicationHost9ShellImplEEEPT_S5_NS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN7content19MojoApplicationHost4InitEv
+ fun:_ZN7content21RenderProcessHostImpl4InitEv
+ fun:_ZN7content18RenderViewHostImpl16CreateRenderViewERKSbItN4base20string16_char_traitsESaItEEiib
+ fun:_ZN7content15WebContentsImpl32CreateRenderViewForRenderManagerEPNS_14RenderViewHostEiPNS_26CrossProcessFrameConnectorE
+ fun:_ZN7content22RenderFrameHostManager14InitRenderViewEPNS_14RenderViewHostEi
+ fun:_ZN7content22RenderFrameHostManager8NavigateERKNS_19NavigationEntryImplE
+ fun:_ZN7content13NavigatorImpl15NavigateToEntryEPNS_19RenderFrameHostImplERKNS_19NavigationEntryImplENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content13NavigatorImpl22NavigateToPendingEntryEPNS_19RenderFrameHostImplENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content15WebContentsImpl22NavigateToPendingEntryENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content24NavigationControllerImpl22NavigateToPendingEntryENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content24NavigationControllerImpl9LoadEntryEPNS_19NavigationEntryImplE
+ fun:_ZN7content24NavigationControllerImpl17LoadURLWithParamsERKNS_20NavigationController13LoadURLParamsE
+ fun:_ZN7content5Shell15LoadURLForFrameERK4GURLRKSs
+ fun:_ZN7content5Shell7LoadURLERK4GURL
+}
+{
+ bug_372832
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content27ServiceWorkerContextWrapper12InitInternalERKN4base8FilePathEPNS1_19SequencedTaskRunnerEPNS1_16MessageLoopProxyEPN5quota17QuotaManagerProxyE
+}
+{
+ bug_379359
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content27ServiceWorkerContextWrapperC1EPNS_14BrowserContextE
+ fun:_ZN7content20StoragePartitionImpl6CreateEPNS_14BrowserContextEbRKN4base8FilePathE
+ fun:_ZN7content23StoragePartitionImplMap3GetERKSsS2_b
+ fun:_ZN7content12_GLOBAL__N_129GetStoragePartitionFromConfigEPNS_14BrowserContext*
+ fun:_ZN7content14BrowserContext19GetStoragePartitionEPS0_PNS_12SiteInstanceE
+ fun:_ZN7content14BrowserContext26GetDefaultStoragePartitionEPS0_
+ fun:_ZN7content21ShellBrowserMainParts21PreMainMessageLoopRunEv
+ fun:_ZN7content15BrowserMainLoop21PreMainMessageLoopRunEv
+}
+{
+ bug_379943
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7content20StoragePartitionImpl6CreateEPNS_14BrowserContextEbRKN4base8FilePathE
+ fun:_ZN7content23StoragePartitionImplMap3GetERKSsS2_b
+ fun:_ZN7content12_GLOBAL__N_129GetStoragePartitionFromConfigEPNS_14BrowserContext*
+ fun:_ZN7content14BrowserContext19GetStoragePartitionEPS0_PNS_12SiteInstanceE
+ fun:_ZN7content14BrowserContext26GetDefaultStoragePartitionEPS0_
+ fun:_ZN7content21ShellBrowserMainParts21PreMainMessageLoopRunEv
+ fun:_ZN7content15BrowserMainLoop21PreMainMessageLoopRunEv
+}
+{
+ bug_380575
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeISt4pairIKSsPN3net20URLRequestJobFactory15ProtocolHandlerEEEE8allocateEmPKv
+ ...
+ fun:_ZNSt3mapISsPN3net20URLRequestJobFactory15ProtocolHandlerESt4lessISsESaISt4pairIKSsS3_EEEixERS7_
+ fun:_ZN3net24URLRequestJobFactoryImpl18SetProtocolHandlerERKSsPNS_20URLRequestJobFactory15ProtocolHandlerE
+ ...
+ fun:_ZN7content28ShellURLRequestContextGetter20GetURLRequestContextEv
+}
+{
+ bug_381065
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN5blink18ModulesInitializer20registerEventFactoryEv
+ fun:_ZN5blink15CoreInitializer4initEv
+ fun:_ZN5blink19initializeWithoutV8EPNS_8PlatformE
+ fun:_ZN5blink10initializeEPNS_8PlatformE
+ fun:_ZN7content25TestWebKitPlatformSupportC2Ev
+ fun:_ZN7content25TestWebKitPlatformSupportC1Ev
+ fun:_ZN7content17UnitTestTestSuiteC2EPN4base9TestSuiteE
+ fun:_ZN7content17UnitTestTestSuiteC1EPN4base9TestSuiteE
+}
+{
+ bug_381156
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN*14SkTDynamicHashI10SkFlatData*
+ fun:_ZN16SkFlatDictionaryI7SkPaintNS0_16FlatteningTraitsEE24findAndReturnMutableFlatERKS0_
+ fun:_ZN16SkFlatDictionaryI7SkPaintNS0_16FlatteningTraitsEE17findAndReturnFlatERKS0_
+ fun:_ZN15SkPictureRecord16getFlatPaintDataERK7SkPaint
+ fun:_ZN15SkPictureRecord11addPaintPtrEPK7SkPaint
+ fun:_ZN15SkPictureRecord8addPaintERK7SkPaint
+ fun:_ZN15SkPictureRecord8drawPathERK6SkPathRK7SkPaint
+ fun:_ZN12SkBBoxRecord8drawPathERK6SkPathRK7SkPaint
+ fun:_ZN5blink15GraphicsContext8drawPathERK6SkPathRK7SkPaint
+ fun:_ZN5blink15GraphicsContext10strokePathERKNS_4PathE
+ fun:_ZNK7blink14RenderSVGShape11strokeShapeEPNS_15GraphicsContextE
+ fun:_ZNK7blink13RenderSVGPath11strokeShapeEPNS_15GraphicsContextE
+ fun:_ZN5blink27RenderSVGResourceSolidColor17postApplyResourceEPNS_12RenderObjectERPNS_15GraphicsContextEtPKNS_4PathEPKNS_14RenderSVGShapeE
+ fun:_ZN5blink14RenderSVGShape11strokeShapeEPNS_11RenderStyleEPNS_15GraphicsContextE
+ fun:_ZN5blink14RenderSVGShape5paintERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink9RenderBox5paintERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink13RenderSVGRoot13paintReplacedERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink14RenderReplaced5paintERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock18paintAsInlineBlockEPNS_12RenderObjectERNS_9PaintInfoERKNS_11LayoutPointE
+}
+{
+ bug_385381
+ Memcheck:Unaddressable
+ fun:_ZN5blink23FrameLoaderStateMachine9advanceToENS0_5StateE
+ fun:_ZN5blink11FrameLoader4initEv
+ fun:_ZN5blink10LocalFrame4initEv
+ fun:_ZN5blink17WebLocalFrameImpl22initializeAsChildFrameEPN7blink9FrameHostEPNS1_10FrameOwnerERKN3WTF12AtomicStringES9_
+ fun:_ZN5blink17WebLocalFrameImpl16createChildFrameERKN7blink16FrameLoadRequestEPNS1_21HTMLFrameOwnerElementE
+}
+{
+ bug_385396a
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink11RenderLayer7hitTestERKNS_14HitTestRequestERKNS_15HitTestLocationERNS_13HitTestResultE
+ fun:_ZN5blink10RenderView7hitTestERKNS_14HitTestRequestERKNS_15HitTestLocationERNS_13HitTestResultE
+ fun:_ZN5blink10RenderView7hitTestERKNS_14HitTestRequestERNS_13HitTestResultE
+ ...
+ fun:_ZN5blink12EventHandler18handleGestureEventERKNS_20PlatformGestureEventE
+ fun:_ZN5blink11WebViewImpl18handleGestureEventERKNS_15WebGestureEventE
+}
+{
+ bug_385396b
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink11LayoutPointC*ERKNS_8IntPointE
+ ...
+ fun:_ZN5blink12EventHandler18handleGestureEventERKNS_20PlatformGestureEventE
+ fun:_ZN5blink11WebViewImpl18handleGestureEventERKNS_15WebGestureEventE
+}
+{
+ bug_385396c
+ Memcheck:Uninitialized
+ fun:_ZNK5blink7IntRect8containsEii
+ ...
+ fun:_ZN5blink12EventHandler18handleGestureEventERKNS_20PlatformGestureEventE
+ fun:_ZN5blink11WebViewImpl18handleGestureEventERKNS_15WebGestureEventE
+}
+{
+ bug_385396d
+ Memcheck:Uninitialized
+ fun:_ZNK7blink10LayoutUnit5floorEv
+ ...
+ fun:_ZN5blink12EventHandler18handleGestureEventERKNS_20PlatformGestureEventE
+ fun:_ZN5blink11WebViewImpl18handleGestureEventERKNS_15WebGestureEventE
+ fun:_ZN5blink18PageWidgetDelegate16handleInputEventEPN7blink4PageERNS_22PageWidgetEventHandlerERKNS_13WebInputEventE
+}
+{
+ bug_385396e
+ Memcheck:Uninitialized
+ fun:_ZN5blink15roundedIntPointERKNS_11LayoutPointE
+ fun:_ZNK5blink15HitTestLocation12roundedPointEv
+ fun:_ZN5blink10RenderView7hitTestERKNS_14HitTestRequestERKNS_15HitTestLocationERNS_13HitTestResultE
+ fun:_ZN5blink10RenderView7hitTestERKNS_14HitTestRequestERNS_13HitTestResultE
+ fun:_ZN5blink12EventHandler20hitTestResultAtPointERKNS_11LayoutPointEjRKNS_10LayoutSizeE
+ fun:_ZN5blink12EventHandler18targetGestureEventERKNS_20PlatformGestureEventEb
+}
+{
+ bug_387435
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content16WebURLLoaderImplC1Ev
+ fun:_ZN7content17BlinkPlatformImpl15createURLLoaderEv
+ fun:_ZN5blink10PingLoaderC1EPNS_10LocalFrameERNS_15ResourceRequestERKNS_18FetchInitiatorInfoENS_17StoredCredentialsE
+ fun:_ZN5blink10PingLoader5startEPNS_10LocalFrameERNS_15ResourceRequestERKNS_18FetchInitiatorInfoENS_17StoredCredentialsE
+ fun:_ZN5blink10PingLoader9loadImageEPNS_10LocalFrameERKNS_4KURLE
+ fun:_ZN5blink15ResourceFetcher10fetchImageERNS_12FetchRequestE
+ fun:_ZN5blink11ImageLoader19doUpdateFromElementEb
+ fun:_ZN5blink11ImageLoader4Task3runEv
+}
+{
+ bug_386418
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10WorkerPool16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_b
+ fun:_ZN3net16HostResolverImpl16LoopbackProbeJobC1ERKN4base7WeakPtrIS0_EE
+ fun:_ZN3net16HostResolverImplC1ERKNS_12HostResolver7OptionsEPNS_6NetLogE
+ fun:_ZN8chromeos24HostResolverImplChromeOSC1E13*
+}
+{
+ bug_387993
+ Memcheck:Uninitialized
+ fun:_ZN11SkBaseMutex7acquireEv
+ fun:_ZN18SkAutoMutexAcquireC2EP11SkBaseMutex
+ fun:_ZN18SkAutoMutexAcquireC1EP11SkBaseMutex
+ fun:_ZN12SkGlyphCache10VisitCacheEP10SkTypefacePK12SkDescriptorPFbPKS_PvES7_
+ fun:_ZN12SkGlyphCache11DetachCacheEP10SkTypefacePK12SkDescriptor
+ fun:_ZL14DetachDescProcP10SkTypefacePK12SkDescriptorPv
+ fun:_ZNK7SkPaint14descriptorProcEPK18SkDevicePropertiesPK8SkMatrixPFvP10SkTypefacePK12SkDescriptorPvESB_b
+ fun:_ZNK7SkPaint11detachCacheEPK18SkDevicePropertiesPK8SkMatrixb
+ fun:_ZN16SkAutoGlyphCacheC2ERK7SkPaintPK18SkDevicePropertiesPK8SkMatrix
+ fun:_ZN16SkAutoGlyphCacheC1ERK7SkPaintPK18SkDevicePropertiesPK8SkMatrix
+}
+{
+ bug_388013
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blinkL33createInjectedScriptHostV8WrapperEPNS_18InjectedScriptHostEPN2v87IsolateE
+ fun:_ZN5blink21InjectedScriptManager20createInjectedScriptERKN3WTF6StringEPNS_11ScriptStateEi
+ fun:_ZN5blink21InjectedScriptManager17injectedScriptForEPNS_11ScriptStateE
+ fun:_ZN5blink22InspectorDebuggerAgent17currentCallFramesEv
+ fun:_ZN5blink22InspectorDebuggerAgent8didPauseEPNS_11ScriptStateERKNS_11ScriptValueES5_RKN3WTF6VectorINS6_6StringELm0ENS6_16DefaultAllocatorEEE
+ ...
+ fun:_ZN2v88internal5Debug17CallEventCallbackENS_10DebugEventENS0_6HandleINS0_6ObjectEEES5_PNS_5Debug10ClientDataE
+ fun:_ZN2v88internal5Debug17ProcessDebugEventENS_10DebugEventENS0_6HandleINS0_8JSObjectEEEb
+ fun:_ZN2v88internal5Debug12OnDebugBreakENS0_6HandleINS0_6ObjectEEEb
+}
+{
+ bug_388013_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blinkL33createInjectedScriptHostV8WrapperEPNS_18InjectedScriptHostEPN2v87IsolateE
+ fun:_ZN5blink21InjectedScriptManager20createInjectedScriptERKN3WTF6StringEPNS_11ScriptStateEi
+ fun:_ZN5blink21InjectedScriptManager17injectedScriptForEPNS_11ScriptStateE
+ fun:_ZN5blink18WorkerRuntimeAgent21injectedScriptForEvalEPN3WTF6StringEPKi
+ fun:_ZN5blink21InspectorRuntimeAgent8evaluateEPN3WTF6String*
+ fun:_ZThn32_N5blink21InspectorRuntimeAgent8evaluateEPN3WTF6String*
+ fun:_ZN5blink30InspectorBackendDispatcherImpl16Runtime_evaluateElPNS_10JSONObjectEPNS_9JSONArrayE
+ fun:_ZN5blink30InspectorBackendDispatcherImpl8dispatchERKN3WTF6StringE
+ fun:_ZN5blink25WorkerInspectorController27dispatchMessageFromFrontendERKN3WTF6StringE
+ fun:_ZN5blinkL30dispatchOnInspectorBackendTaskEPNS_16ExecutionContextERKN3WTF6StringE
+}
+{
+ bug_388668
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN20data_reduction_proxy69DataReductionProxyUsageStatsTest_isDataReductionProxyUnreachable_Test8TestBodyEv
+}
+{
+ bug_392912
+ Memcheck:Uninitialized
+ fun:_ZNK8SkStroke10strokePathERK6SkPathPS0_
+ fun:_ZNK11SkStrokeRec11applyToPathEP6SkPathRKS0_
+ fun:_ZNK7SkPaint11getFillPathERK6SkPathPS0_PK6SkRect
+ fun:_ZNK6SkDraw8drawPathERK6SkPathRK7SkPaintPK8SkMatrixbb
+ fun:_ZNK6SkDraw8drawPathERK6SkPathRK7SkPaintPK8SkMatrixb
+ fun:_ZN14SkBitmapDevice8drawPathERK6SkDrawRK6SkPathRK7SkPaintPK8SkMatrixb
+ fun:_ZN8SkCanvas8drawPathERK6SkPathRK7SkPaint
+}
+{
+ bug_392936
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10WorkerPool16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_b
+ fun:_ZN3net16HostResolverImpl16LoopbackProbeJobC2ERKN4base7WeakPtrIS0_EE
+ fun:_ZN3net16HostResolverImplC1ERKNS_12HostResolver7OptionsEPNS_6NetLogE
+ fun:_ZN8chromeos24HostResolverImplChromeOSC1E13scoped_refptrIN4base16MessageLoopProxyEEPNS_19NetworkStateHandlerERKN3net12HostResolver7OptionsEPNS7_6NetLogE
+ fun:_ZN8chromeos24HostResolverImplChromeOS25CreateHostResolverForTestE13scoped_refptrIN4base16MessageLoopProxyEEPNS_19NetworkStateHandlerE
+ fun:_ZN28HostResolverImplChromeOSTest22InitializeHostResolverEv
+}
+{
+ bug_394558
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN32ProfileSyncComponentsFactoryMockC1Ev
+}
+{
+ bug_394624
+ Memcheck:Leak
+ ...
+ fun:_ZN3net24URLRequestJobFactoryImpl18SetProtocolHandlerERKSsPNS_20URLRequestJobFactory15ProtocolHandlerE
+ ...
+ fun:_ZN7content28ShellURLRequestContextGetter20GetURLRequestContextEv
+ fun:_ZN7content21ChromeAppCacheService20InitializeOnIOThreadERKN4base8FilePathEPNS_15ResourceContextEPN3net23URLRequestContextGetterE13scoped_refptrIN5quota20SpecialStoragePolicyEE
+}
+{
+ bug_396658
+ Memcheck:Uninitialized
+ ...
+ fun:wk_png_write_find_filter
+ fun:wk_png_write_row
+}
+{
+ bug_397066_a
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink13InlineFlowBox9addToLineEPNS_9InlineBoxE
+ ...
+ fun:_ZN5blink15RenderBlockFlow13constructLineERNS_11BidiRunListINS_7BidiRunEEERKNS_8LineInfoE
+ fun:_ZN5blink15RenderBlockFlow27createLineBoxesFromBidiRunsEjRNS_11BidiRunListINS_7BidiRunEEERKNS_14InlineIteratorERNS_8LineInfoERNS_21VerticalPositionCacheEPS2_RN3WTF6VectorINS_15WordMeasurementELm64ENSD_16DefaultAllocatorEEE
+ fun:_ZN5blink15RenderBlockFlow26layoutRunsAndFloatsInRangeERNS_15LineLayoutStateERNS_12BidiResolverINS_14InlineIteratorENS_7BidiRunEEERKS4_RKNS_10BidiStatusE
+ fun:_ZN5blink15RenderBlockFlow19layoutRunsAndFloatsERNS_15LineLayoutStateE
+ fun:_ZN5blink15RenderBlockFlow20layoutInlineChildrenEbRNS_10LayoutUnitES2_S1_
+ ...
+ fun:_ZN5blink15RenderBlockFlow11layoutBlockEb
+}
+{
+ bug_397066_b
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink13InlineFlowBox24computeLogicalBoxHeightsEPNS_13RootInlineBoxERNS_10LayoutUnitES4_RiS5_RbS6_bRN3WTF7HashMapIPKNS_13InlineTextBoxESt4pairINS7_6VectorIPKNS_14SimpleFontDataELm0ENS7_16DefaultAllocatorEEENS_13GlyphOverflowEENS7_7PtrHashISB_EENS7_10HashTraitsISB_EENSN_ISK_EESH_EENS_12FontBaselineERNS_21VerticalPositionCacheE
+ fun:_ZN5blink13RootInlineBox26alignBoxesInBlockDirectionENS_10LayoutUnitERN3WTF7HashMapIPKNS_13InlineTextBoxESt4pairINS2_6VectorIPKNS_14SimpleFontDataELm0ENS2_16DefaultAllocatorEEENS_13GlyphOverflowEENS2_7PtrHashIS6_EENS2_10HashTraitsIS6_EENSI_ISF_EESC_EERNS_21VerticalPositionCacheE
+ ...
+ fun:_ZN5blink15RenderBlockFlow19layoutRunsAndFloatsERNS_15LineLayoutStateE
+ fun:_ZN5blink15RenderBlockFlow20layoutInlineChildrenEbRNS_10LayoutUnitES2_S1_
+ fun:_ZN5blink15RenderBlockFlow15layoutBlockFlowEbRNS_10LayoutUnitERNS_18SubtreeLayoutScopeE
+ fun:_ZN5blink15RenderBlockFlow11layoutBlockEb
+}
+{
+ bug_397066_c
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink13InlineFlowBox26placeBoxesInBlockDirectionENS_10LayoutUnitES1_ibRS1_S2_S2_RbS2_S2_S3_S3_NS_12FontBaselineE
+ fun:_ZN5blink13RootInlineBox26alignBoxesInBlockDirectionENS_10LayoutUnitERN3WTF7HashMapIPKNS_13InlineTextBoxESt4pairINS2_6VectorIPKNS_14SimpleFontDataELm0ENS2_16DefaultAllocatorEEENS_13GlyphOverflowEENS2_7PtrHashIS6_EENS2_10HashTraitsIS6_EENSI_ISF_EESC_EERNS_21VerticalPositionCacheE
+ ...
+ fun:_ZN5blink15RenderBlockFlow19layoutRunsAndFloatsERNS_15LineLayoutStateE
+ fun:_ZN5blink15RenderBlockFlow20layoutInlineChildrenEbRNS_10LayoutUnitES2_S1_
+ fun:_ZN5blink15RenderBlockFlow15layoutBlockFlowEbRNS_10LayoutUnitERNS_18SubtreeLayoutScopeE
+ fun:_ZN5blink15RenderBlockFlow11layoutBlockEb
+}
+{
+ bug_397066_d
+ Memcheck:Uninitialized
+ fun:_ZN5blink13InlineFlowBox45clearDescendantsHaveSameLineHeightAndBaselineEv
+ fun:_ZN5blink13InlineFlowBox9addToLineEPNS_9InlineBoxE
+ ...
+ fun:_ZN5blink15RenderBlockFlow13constructLineERNS_11BidiRunListINS_7BidiRunEEERKNS_8LineInfoE
+ fun:_ZN5blink15RenderBlockFlow27createLineBoxesFromBidiRuns*
+}
+{
+ bug_397066_e
+ Memcheck:Uninitialized
+ fun:_ZN5blink13InlineFlowBox9addToLineEPNS_9InlineBox*
+ fun:_ZN5blink15RenderBlockFlow*
+ ...
+ fun:_ZN5blink15RenderBlockFlow27createLineBoxesFromBidiRunsEjRNS*
+ fun:_ZN5blink15RenderBlockFlow26layoutRunsAndFloatsInRangeERNS_15LineLayoutStateERNS_12BidiResolver*
+ fun:_ZN5blink15RenderBlockFlow19layoutRunsAndFloatsERNS_15LineLayoutState*
+}
+{
+ bug_397066_f
+ Memcheck:Uninitialized
+ fun:_ZNK5blink13InlineFlowBox35constrainToLineTopAndBottomIfNeededERNS_10LayoutRectE
+ fun:_ZN5blink13InlineFlowBox28paintBoxDecorationBackgroundERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13RootInlineBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZNK5blink17RenderLineBoxList5paintEPNS_20RenderBoxModelObjectERNS_9PaintInfoERKNS_11LayoutPointE
+}
+{
+ bug_397075_a
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal6Object11SetPropertyEPNS0_14LookupIteratorENS0_6HandleIS1_EENS0_10StrictModeENS1_14StoreFromKeyedE
+ fun:_ZN2v88internal6Object11SetPropertyENS0_6HandleIS1_EENS2_INS0_4NameEEES3_NS0_10StrictModeENS1_14StoreFromKeyedE
+ fun:_ZN2v88internal7Runtime17SetObjectPropertyEPNS0_7IsolateENS0_6HandleINS0_6ObjectEEES6_S6_NS0_10StrictModeE
+ fun:_ZN2v86Object3SetENS_6HandleINS_5ValueEEES3_
+ fun:_ZN18WebCoreTestSupport21injectInternalsObjectEN2v85LocalINS0_7ContextEEE
+ fun:_ZN5blink17WebTestingSupport21injectInternalsObjectEPNS_13WebLocalFrameE
+ fun:_ZN7content16WebKitTestRunner20DidClearWindowObjectEPN5blink13WebLocalFrameE
+ fun:_ZN7content14RenderViewImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
+ fun:_ZN7content15RenderFrameImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
+ fun:_ZThn16_N7content15RenderFrameImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
+ fun:_ZN5blink21FrameLoaderClientImpl39dispatchDidClearWindowObjectInMainWorldEv
+ fun:_ZN5blink11FrameLoader39dispatchDidClearWindowObjectInMainWorldEv
+ fun:_ZN5blink16ScriptController11windowShellERNS_15DOMWrapperWorldE
+ fun:_ZN5blink11toV8ContextEPNS_10LocalFrameERNS_15DOMWrapperWorldE
+ fun:_ZNK5blink17WebLocalFrameImpl22mainWorldScriptContextEv
+ fun:_ZN5blink17WebTestingSupport20resetInternalsObjectEPNS_13WebLocalFrameE
+ fun:_ZN7content16WebKitTestRunner5ResetEv
+ fun:_ZN7content26ShellContentRendererClient17RenderViewCreatedEPNS_10RenderViewE
+ fun:_ZN7content14RenderViewImpl10InitializeEPNS_20RenderViewImplParamsE
+ fun:_ZN7content14RenderViewImpl6CreateEibRKNS_19RendererPreferencesERKNS_14WebPreferencesEiiilRKSbItN4base20string16_char_traitsESaItEEbbibbiRKN5blink13WebScreenInfoE17AccessibilityMode
+ fun:_ZN7content16RenderThreadImpl15OnCreateNewViewERK18ViewMsg_New_Params
+}
+{
+ bug_397075_b
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal6Object11SetPropertyEPNS0_14LookupIteratorENS0_6HandleIS1_EENS0_10StrictModeENS1_14StoreFromKeyedE
+ fun:_ZN2v88internal6Object11SetPropertyENS0_6HandleIS1_EENS2_INS0_4NameEEES3_NS0_10StrictModeENS1_14StoreFromKeyedE
+ ...
+ fun:_ZN2v88internalL6InvokeEbNS0_6HandleINS0_10JSFunctionEEENS1_INS0_6ObjectEEEiPS5_
+ fun:_ZN2v88internal9Execution4CallEPNS0_7IsolateENS0_6HandleINS0_6ObjectEEES6_iPS6_b
+}
+{
+ bug_398349_a
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal19JSObjectWalkVisitorINS0_29AllocationSiteCreationContextEE13StructureWalkENS0_6HandleINS0_8JSObjectEEE
+ fun:_ZN2v88internal8JSObject8DeepWalkENS0_6HandleIS1_EEPNS0_29AllocationSiteCreationContextE
+ fun:_ZN2v88internal27Runtime_CreateObjectLiteralEiPPNS0_6ObjectEPNS0_7IsolateE
+}
+{
+ bug_398349_b
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal19JSObjectWalkVisitorINS0_26AllocationSiteUsageContextEE13StructureWalkENS0_6HandleINS0_8JSObjectEEE
+ fun:_ZN2v88internal8JSObject8DeepCopyENS0_6HandleIS1_EEPNS0_26AllocationSiteUsageContextENS1_13DeepCopyHintsE
+ fun:_ZN2v88internal27Runtime_CreateObjectLiteralEiPPNS0_6ObjectEPNS0_7IsolateE
+}
+{
+ bug_399853_a
+ Memcheck:Uninitialized
+ fun:_ZNK8SkStroke10strokePathERK6SkPathPS0_
+ fun:_ZNK11SkStrokeRec11applyToPathEP6SkPathRKS0_
+ fun:_ZNK7SkPaint11getFillPathERK6SkPathPS0_PK6SkRect
+}
+{
+ bug_399853_b
+ Memcheck:Uninitialized
+ fun:_ZNK8SkStroke10strokePathERK6SkPathPS0_
+ fun:_ZNK11SkStrokeRec11applyToPathEP6SkPathRKS0_
+ fun:_ZN15SkScalerContext15internalGetPathERK7SkGlyphP6SkPathS4_P8SkMatrix
+}
+{
+ bug_400547
+ Memcheck:Uninitialized
+ fun:_ZNK5blink22GraphicsLayerDebugInfo17appendOwnerNodeIdEPNS_10JSONObjectE
+ fun:_ZNK5blink22GraphicsLayerDebugInfo19appendAsTraceFormatEPNS_9WebStringE
+ fun:_ZNK7content15TracedDebugInfo19AppendAsTraceFormatEPSs
+ fun:_ZNK2cc9LayerImpl11AsValueIntoEPN4base5debug11TracedValueE
+ fun:_ZNK2cc*LayerImpl11AsValueIntoEPN4base5debug11TracedValueE
+}
+{
+ bug_402257_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN10extensions11ApiTestBase15RegisterModulesEv
+ fun:_ZN10extensions11ApiTestBase5SetUpEv
+ fun:_ZN10extensions16DataReceiverTest5SetUpEv
+}
+{
+ bug_402257_b
+ Memcheck:Leak
+ ...
+ fun:_ZN10extensions12_GLOBAL__N_111TestNatives24GetObjectTemplateBuilderEPN2v87IsolateE
+ fun:_ZN3gin13WrappableBase14GetWrapperImplEPN2v87IsolateEPNS_11WrapperInfoE
+ fun:_ZN3gin9WrappableIN10extensions12_GLOBAL__N_111TestNativesEE10GetWrapperEPN2v87IsolateE
+ fun:_ZN3gin12CreateHandleIN10extensions12_GLOBAL__N_111TestNativesEEENS_6HandleIT_EEPN2v87IsolateEPS5_
+ fun:_ZN10extensions12_GLOBAL__N_111TestNatives6CreateEPN2v87IsolateERKN4base8CallbackIFvvEEE
+ fun:_ZN10extensions11ApiTestBase7RunTestERKSsS2_
+ fun:_ZN10extensions29DataReceiverTest_Receive_Test8TestBodyEv
+}
+{
+ bug_402257_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gin13HandleWrapper6CreateEPN2v87IsolateEj
+ fun:_ZN3gin9ConverterIN4mojo6HandleEvE4ToV8EPN2v87IsolateERKS2_
+ fun:_ZN3gin11ConvertToV8IN4mojo6HandleEEEN2v86HandleINS3_5ValueEEEPNS3_7IsolateET_
+ fun:_ZN3gin10Dictionary3SetIN4mojo6HandleEEEbRKSsT_
+ fun:_ZN4mojo2js12_GLOBAL__N_114CreateDataPipeERKN3gin9ArgumentsE
+}
+{
+ bug_403774
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN5blink18ModulesInitializer4initEv
+ fun:_ZN5blink19initializeWithoutV8EPNS_8PlatformE
+ fun:_ZN5blink10initializeEPNS_8PlatformE
+ fun:_ZN7content25TestWebKitPlatformSupportC2Ev
+ fun:_ZN7content25TestWebKitPlatformSupportC1Ev
+ fun:_ZN7content17UnitTestTestSuiteC2EPN4base9TestSuiteE
+ fun:_ZN7content17UnitTestTestSuiteC1EPN4base9TestSuiteE
+}
+{
+ bug_403775
+ Memcheck:Leak
+ fun:_Zna*
+ fun:_ZN28NotificationConversionHelper28GfxImageToNotificationBitmapEPKN3gfx5ImageEPN10extensions3api13notifications18NotificationBitmapE
+ fun:_ZN28NotificationConversionHelper33NotificationToNotificationOptionsERK12NotificationPN10extensions3api13notifications19NotificationOptionsE
+ fun:_ZN71NotificationConversionHelperTest_NotificationToNotificationOptions_Test8TestBodyEv
+}
+{
+ bug_404182
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8app_list4test12_GLOBAL__N_122AppListViewTestContext16RunStartPageTestEv
+}
+{
+ bug_405865
+ Memcheck:Uninitialized
+ fun:_ZNK5blink13InlineFlowBox20computeMaxLogicalTopERf
+ ...
+ fun:_ZNK5blink13RootInlineBox13maxLogicalTopEv
+ fun:_ZN5blinkL22computeUnderlineOffsetENS_21TextUnderlinePositionERKNS_11FontMetricsEPKNS_13InlineTextBoxEf
+ fun:_ZN5blink13InlineTextBox15paintDecorationEPNS_15GraphicsContextERKNS_10FloatPointENS_14TextDecorationEPKNS_10ShadowListE
+ fun:_ZN5blink13InlineTextBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13RootInlineBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZNK5blink17RenderLineBoxList5paintEPNS_20RenderBoxModelObjectERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock13paintContentsERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock11paintObjectERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock5paintERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock10paintChildEPNS_9RenderBoxERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock13paintChildrenERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock13paintContentsERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock11paintObjectERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock5paintERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock10paintChildEPNS_9RenderBoxERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock13paintChildrenERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock13paintContentsERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink11RenderBlock11paintObjectERNS_9PaintInfoERKNS_11LayoutPointE
+}
+{
+ bug_411235
+ Memcheck:Overlap
+ fun:memcpy@@GLIBC_2.14
+ fun:IncrementalCopyFastPath
+ fun:BrotliDecompress
+ fun:BrotliDecompressBuffer
+ fun:_ZN12_GLOBAL__N_115Woff2UncompressEPhmPKhmj
+}
+{
+ bug_415092
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base22PosixDynamicThreadPool7AddTaskEPNS_11PendingTaskE
+ fun:_ZN4base22PosixDynamicThreadPool8PostTaskERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEE
+ fun:_ZN4base12_GLOBAL__N_114WorkerPoolImpl8PostTaskERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEEb
+ fun:_ZN4base10WorkerPool8PostTaskERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEEb
+}
+{
+ bug_416729
+ Memcheck:Unaddressable
+ fun:_ZNK5blink6MemberINS_38PlatformSpeechSynthesisUtteranceClientEEcvPS1_Ev
+ fun:_ZNK5blink32PlatformSpeechSynthesisUtterance6clientEv
+ fun:_ZN5blink15SpeechSynthesis17didFinishSpeakingEPNS_32PlatformSpeechSynthesisUtteranceE
+ fun:_ZN5blink29PlatformSpeechSynthesizerMock16speakingFinishedEPNS_5TimerIS0_EE
+ fun:_ZN5blink5TimerINS_29PlatformSpeechSynthesizerMockEE5firedEv
+ fun:_ZN5blink12ThreadTimers24sharedTimerFiredInternalEv
+ fun:_ZN5blink12ThreadTimers16sharedTimerFiredEv
+ fun:_ZN5blink9Scheduler15tickSharedTimerEv
+ fun:_ZN5blink9Scheduler18sharedTimerAdapterEv
+ fun:_ZN7content17BlinkPlatformImpl9DoTimeoutEv
+}
+{
+ bug_417048a
+ Memcheck:Uninitialized
+ ...
+ fun:_ZNK5blink17RenderLineBoxList7hitTestEPNS_20RenderBoxModelObjectERKNS_14HitTestRequestERNS_13HitTestResultERKNS_15HitTestLocationERKNS_11LayoutPointENS_13HitTestActionE
+ fun:_ZN5blink*Render*_14HitTestRequestERNS_13HitTestResultERKNS_15HitTestLocationERKNS_11LayoutPointENS_13HitTestActionE
+}
+{
+ bug_417048b
+ Memcheck:Uninitialized
+ fun:_ZN5blink13InlineFlowBox45clearDescendantsHaveSameLineHeightAndBaselineEv
+ fun:_ZN5blinkL25setLogicalWidthForTextRunEPNS_13RootInlineBoxEPNS_7BidiRunEPNS_10RenderTextEfRKNS*
+ fun:_ZN5blink15RenderBlockFlow41computeInlineDirectionPositionsForSegmentEPNS_13RootInlineBoxERKNS*
+ fun:_ZN5blink15RenderBlockFlow38computeInlineDirectionPositionsForLineEPNS_13RootInlineBoxERKNS*
+}
+{
+ bug_417048c
+ Memcheck:Uninitialized
+ fun:_ZNK5blink13InlineFlowBox35constrainToLineTopAndBottomIfNeededERNS_10LayoutRectE
+ fun:_ZN5blink13InlineFlowBox28paintBoxDecorationBackgroundERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13RootInlineBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZNK5blink18LineBoxListPainter5paintEPNS_20RenderBoxModelObjectERNS_9PaintInfoERKNS_11LayoutPointE
+}
+{
+ bug_417048d
+ Memcheck:Uninitialized
+ ...
+ fun:_ZNK5blink13RootInlineBox13maxLogicalTopEv
+ fun:_ZN5blinkL22computeUnderlineOffsetENS_21TextUnderlinePositionERKNS_11FontMetricsEPKNS_13InlineTextBoxEf
+ fun:_ZN5blink13InlineTextBox15paintDecorationEPNS_15GraphicsContextERKNS_10FloatPointENS_14TextDecorationE
+ fun:_ZN5blink13InlineTextBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13RootInlineBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZNK5blink17RenderLineBoxList5paintEPNS_20RenderBoxModelObjectERNS_9PaintInfoERKNS_11LayoutPointE
+}
+{
+ bug_417093
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF24ThreadSafeRefCountedBasenwEm
+ fun:_ZN5blink7RawData6createEv
+ fun:_ZN5blink6Stream7addDataEPKcm
+ fun:_ZN5blink14XMLHttpRequest14didReceiveDataEPKci
+ fun:_ZThn80_N5blink14XMLHttpRequest14didReceiveDataEPKci
+ fun:_ZN5blink24DocumentThreadableLoader18handleReceivedDataEPKci
+ fun:_ZN5blink24DocumentThreadableLoader12dataReceivedEPNS_8ResourceEPKci
+ fun:_ZThn16_N5blink24DocumentThreadableLoader12dataReceivedEPNS_8ResourceEPKci
+}
+{
+ bug_417119
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7storage27TaskRunnerBoundObserverListINS_18FileUpdateObserverEPS1_EaSERKS3_
+ fun:_ZN7storage26FileSystemOperationContext20set_update_observersERKNS_27TaskRunnerBoundObserverListINS_18FileUpdateObserverEPS2_EE
+ fun:_ZNK7storage32SandboxFileSystemBackendDelegate32CreateFileSystemOperationContextERKNS_13FileSystemURLEPNS_17FileSystemContextEPN4base4File5ErrorE
+ fun:_ZNK7storage24SandboxFileSystemBackend25CreateFileSystemOperationERKNS_13FileSystemURLEPNS_17FileSystemContextEPN4base4File5ErrorE
+ fun:_ZN7storage17FileSystemContext25CreateFileSystemOperationERKNS_13FileSystemURLEPN4base4File5ErrorE
+}
+{
+ bug_417526
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base19SequencedWorkerPoolC1EmRKSs
+ fun:_ZN16sync_file_system13drive_backend14SyncEngineTest5SetUpEv
+}
+{
+ bug_419373
+ Memcheck:Uninitialized
+ fun:_ZNK5blink20InlineFlowBoxPainter49roundedFrameRectClampedToLineTopAndBottomIfNeededEv
+ fun:_ZN5blink20InlineFlowBoxPainter28paintBoxDecorationBackgroundERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink20InlineFlowBoxPainter5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink20InlineFlowBoxPainter5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13RootInlineBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+}
diff --git a/tools/valgrind/memcheck/suppressions_linux.txt b/tools/valgrind/memcheck/suppressions_linux.txt
new file mode 100644
index 0000000..6ce4607
--- /dev/null
+++ b/tools/valgrind/memcheck/suppressions_linux.txt
@@ -0,0 +1,24 @@
+# There are three kinds of suppressions in this file:
+# 1. Third party stuff we have no control over.
+#
+# 2. Intentional unit test errors, stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing.
+#
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+# These should all be in chromium's bug tracking system.
+# Periodically we should sweep this file and the bug tracker clean by
+# running overnight and removing outdated bugs/suppressions.
+#
+# TODO(rnk): Should we move all of the Linux-only system library suppressions
+# over from suppressions.txt? We'd avoid wasting time parsing and matching
+# suppressions on non-Linux, which is basically just Mac.
+#
+#-----------------------------------------------------------------------
+
+# 1. Third party stuff we have no control over.
+
+# 2. Intentional unit test errors, stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing.
+
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+
diff --git a/tools/valgrind/memcheck/suppressions_mac.txt b/tools/valgrind/memcheck/suppressions_mac.txt
new file mode 100644
index 0000000..66885a4
--- /dev/null
+++ b/tools/valgrind/memcheck/suppressions_mac.txt
@@ -0,0 +1,240 @@
+# There are three kinds of suppressions in this file:
+# 1. Third party stuff we have no control over.
+#
+# 2. Intentional unit test errors, stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing.
+#
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+# These should all be in chromium's bug tracking system.
+# Periodically we should sweep this file and the bug tracker clean by
+# running overnight and removing outdated bugs/suppressions.
+#-----------------------------------------------------------------------
+
+# 1. Third party stuff we have no control over.
+{
+ FIXME mac kevent libevent probably needs valgrind hooks
+ Memcheck:Param
+ kevent(changelist)
+ fun:kevent
+ fun:event_base_new
+}
+{
+ # CoreAudio leak. See http://crbug.com/9351
+ bug_9351
+ Memcheck:Leak
+ ...
+ fun:_ZN12HALCADClient19AddPropertyListenerEmPK26AudioObjectPropertyAddressPFlmmS2_PvES3_
+ ...
+ fun:_ZN9HALSystem16CheckOutInstanceEv
+ ...
+}
+{
+ bug_18215
+ Memcheck:Uninitialized
+ fun:_DPSNextEvent
+ fun:-[NSApplication nextEventMatchingMask:untilDate:inMode:dequeue:]
+ fun:-[NSApplication run]
+}
+{
+ # Also filed with Apple as rdar://7255382
+ bug_20459b
+ Memcheck:Leak
+ fun:malloc_zone_malloc
+ fun:_CFRuntimeCreateInstance
+ fun:__CFArrayInit
+ fun:CFArrayCreateMutableCopy
+ ...
+ fun:_ZN8Security12KeychainCore5Trust8evaluate*
+}
+{
+ # See also http://openradar.appspot.com/radar?id=1169404
+ bug_79533b
+ Memcheck:Uninitialized
+ ...
+ fun:_Z*19cssm_DataAbortQuery17cssm_dl_db_handlel
+ fun:CSSM_DL_DataAbortQuery
+ fun:_ZN11SSDLSession14DataAbortQueryEll
+ fun:_Z*19cssm_DataAbortQuery17cssm_dl_db_handlel
+ fun:CSSM_DL_DataAbortQuery
+ fun:tpDbFindIssuerCrl
+ fun:tpVerifyCertGroupWithCrls
+}
+{
+ # QTKit leak. See http://crbug.com/100772 and rdar://10319535.
+ bug_100772
+ Memcheck:Leak
+ fun:calloc
+ fun:QTMLCreateMutex
+ fun:WarholCreateGlobals
+ fun:INIT_QuickTimeLibInternal
+ fun:pthread_once
+ fun:INIT_QuickTimeLib
+ fun:EnterMovies_priv
+ fun:EnterMovies
+ fun:TundraUnitInputFromTSFileEntry
+ fun:TundraUnitVDIGInputEntry
+ fun:TundraUnitCreateFromDescription
+ fun:+[QTCaptureVDIGDevice _refreshDevices]
+ fun:+[QTCaptureVDIGDevice devicesWithIOType:]
+ fun:+[QTCaptureDevice devicesWithIOType:]
+ fun:+[QTCaptureDevice inputDevices]
+ fun:+[QTCaptureDevice inputDevicesWithMediaType:]
+ ...
+}
+{
+ # See http://crbug.com/385604
+ bug_385604_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK11AEEventImpl9duplicateEv
+ fun:AESendMessage
+ fun:_ZL35HIToolboxLSNotificationCallbackFunc18LSNotificationCodedPKvPK7__LSASNS1_11LSSessionIDS1_
+ fun:_ZL48LSScheduleNotificationReceiveMessageCallbackFuncP12__CFMachPortPvlS1_
+ fun:__CFMachPortPerform
+ fun:__CFRunLoopRun
+ ...
+}
+# Intentional leaks in AppKit, for an OS-level cache. Only appear on the first
+# run of each reboot. See also issues 105525, 257276, 340847.
+{
+ bug_257276_b
+ Memcheck:Leak
+ fun:malloc_zone_malloc
+ ...
+ fun:setCursorFromBundle
+ fun:CoreCursorSet
+ fun:-[NSCursor set]
+}
+
+# 2. Intentional unit test errors, stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing.
+{
+ # Mac Sandbox test cases are registered in a global map. This code is only
+ # used in the unit test binary.
+ Mac_Sandbox_Intentional_Leak1
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content8internal19RegisterSandboxTestINS_*
+ ...
+ fun:_ZN16ImageLoaderMachO18doModInitFunctionsERKN11ImageLoader11LinkContextE
+ fun:_ZN11ImageLoader23recursiveInitializationERKNS_11LinkContextEj
+ fun:_ZN11ImageLoader15runInitializersERKNS_11LinkContextE
+ fun:_ZN4dyld24initializeMainExecutableEv
+}
+{
+ # __cxa_get_globals leaks a structure when called for the first time
+ __cxa_get_globals one-time leak
+ Memcheck:Leak
+ ...
+ fun:__cxa_get_globals
+}
+
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+{
+ bug_87629
+ Memcheck:Leak
+ ...
+ fun:realloc
+ fun:new_sem_from_pool
+}
+{
+ bug_93932_a
+ Memcheck:Overlap
+ fun:memcpy
+ fun:vp8_decode_update_thread_context
+ fun:update_context_from_thread
+ ...
+ fun:ff_thread_decode_frame
+ fun:avcodec_decode_video2
+ fun:_ZN5media23FFmpegVideoDecodeEngine6DecodeERK13scoped_refptrINS_6BufferEEPS1_INS_10VideoFrameEE
+ fun:_ZN5media18FFmpegVideoDecoder14DoDecodeBufferERK13scoped_refptrINS_6BufferEE
+}
+{
+ bug_93932_b
+ Memcheck:Overlap
+ fun:memcpy
+ fun:vp8_decode_update_thread_context
+ fun:update_context_from_thread
+ fun:frame_thread_free
+ fun:avcodec_close
+ ...
+ fun:_ZN5media23FFmpegVideoDecodeEngineD0Ev
+ fun:_ZN10scoped_ptrIN5media23FFmpegVideoDecodeEngineEE5resetEPS1_
+ fun:_ZN5media27FFmpegVideoDecodeEngineTestD2Ev
+ fun:_ZN5media51FFmpegVideoDecodeEngineTest_DecodeFrame_Normal_TestD0Ev
+}
+{
+ bug_159190
+ Memcheck:Uninitialized
+ ...
+ fun:_ZNK19TConcreteFontScaler15CopyGlyphBitmapEtjP6CGRectPm
+ ...
+ fun:_ZN9Offscreen5getCGERK19SkScalerContext_MacRK7SkGlyphtPmb
+ fun:_ZN19SkScalerContext_Mac13generateImageERK7SkGlyph
+}
+{
+ bug_173779
+ Memcheck:Uninitialized
+ ...
+ fun:img_data_lock
+ fun:CGSImageDataLock
+ fun:ripc_AcquireImage
+ fun:ripc_DrawImage*
+ fun:CGContextDrawImage*
+ ...
+ fun:_ZN11CUIRenderer4DrawE6CGRectP9CGContextPK14__CFDictionaryPS5_
+}
+{
+ bug_340726
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base17LoadNativeLibrary*
+ fun:_ZN3gfx26InitializeStaticGLBindings*
+ fun:_ZN3gfx9GLSurface30InitializeOneOffImplementation*
+ fun:_ZN3gfx9GLSurface24InitializeOneOffForTestsEv
+}
+{
+ bug_380568
+ Memcheck:Leak
+ fun:calloc
+ fun:_internal_class_createInstanceFromZone
+ fun:_internal_class_createInstance
+ fun:NSAllocateObject
+ fun:+[NSObject(NSObject) alloc]
+ fun:-[VideoCaptureDeviceQTKit initWithFrameReceiver:]
+ fun:_ZN5media21VideoCaptureDeviceMac4InitENS_18VideoCaptureDevice4Name14CaptureApiTypeE
+ fun:_ZN5media28VideoCaptureDeviceFactoryMac6CreateERKNS_18VideoCaptureDevice4NameE
+ fun:_ZN5media45VideoCaptureDeviceTest_OpenInvalidDevice_Test8TestBodyEv
+}
+{
+ bug_385604_b
+ Memcheck:Leak
+ fun:calloc
+ fun:_ZN18hb_object_header_t6createEj
+ fun:_ZL16hb_object_createI9hb_face_tEPT_v
+ fun:hb_face_create_for_tables
+ fun:_ZN3gfx12_GLOBAL__N_118CreateHarfBuzzFaceEP10SkTypeface
+ fun:_ZN3gfx12_GLOBAL__N_118CreateHarfBuzzFontEP10SkTypefacei
+ fun:_ZN3gfx18RenderTextHarfBuzz8ShapeRunEPNS_8internal15TextRunHarfBuzzE
+ fun:_ZN3gfx18RenderTextHarfBuzz12EnsureLayoutEv
+ fun:_ZN3gfx41RenderTextTest_HarfBuzz_RunDirection_Test8TestBodyEv
+}
+{
+ bug_416957
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeISt4pairIKPKN15tracked_objects6BirthsENS3_9DeathDataEEEE8allocateEmPKv
+ fun:_ZNSt8_Rb_treeIPKN15tracked_objects6BirthsESt4pairIKS3_NS0_9DeathDataEESt10_Select1stIS7_ESt4lessIS3_ESaIS7_EE11_M_get_nodeEv
+ fun:_ZNSt8_Rb_treeIPKN15tracked_objects6BirthsESt4pairIKS3_NS0_9DeathDataEESt10_Select1stIS7_ESt4lessIS3_ESaIS7_EE14_M_create_nodeERKS7_
+ fun:_ZNSt8_Rb_treeIPKN15tracked_objects6BirthsESt4pairIKS3_NS0_9DeathDataEESt10_Select1stIS7_ESt4lessIS3_ESaIS7_EE9_M_insertEPSt18_Rb_tree_node_baseSF_RKS7_
+ fun:_ZNSt8_Rb_treeIPKN15tracked_objects6BirthsESt4pairIKS3_NS0_9DeathDataEESt10_Select1stIS7_ESt4lessIS3_ESaIS7_EE16_M_insert_uniqueESt17_Rb_tree_iteratorIS7_ERKS7_
+ fun:_ZNSt3mapIPKN15tracked_objects6BirthsENS0_9DeathDataESt4lessIS3_ESaISt4pairIKS3_S4_EEE6insertESt17_Rb_tree_iteratorIS9_ERKS9_
+ fun:_ZNSt3mapIPKN15tracked_objects6BirthsENS0_9DeathDataESt4lessIS3_ESaISt4pairIKS3_S4_EEEixERS8_
+ fun:_ZN15tracked_objects10ThreadData11TallyADeathERKNS_6BirthsEiRKNS_13TaskStopwatchE
+ fun:_ZN15tracked_objects10ThreadData31TallyRunOnNamedThreadIfTrackingERKN4base12TrackingInfoERKNS_13TaskStopwatchE
+ fun:_ZN4base5debug13TaskAnnotator7RunTaskEPKcS3_RKNS_11PendingTaskE
+ fun:_ZN4base11MessageLoop7RunTaskERKNS_11PendingTaskE
+ fun:_ZN4base11MessageLoop21DeferOrRunPendingTaskERKNS_11PendingTaskE
+ fun:_ZN4base11MessageLoop6DoWorkEv
+}
+
diff --git a/tools/valgrind/memcheck_analyze.py b/tools/valgrind/memcheck_analyze.py
new file mode 100755
index 0000000..ceeedef
--- /dev/null
+++ b/tools/valgrind/memcheck_analyze.py
@@ -0,0 +1,635 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# memcheck_analyze.py
+
+''' Given a valgrind XML file, parses errors and de-duplicates them.'''
+
+import gdb_helper
+
+from collections import defaultdict
+import hashlib
+import logging
+import optparse
+import os
+import re
+import subprocess
+import sys
+import time
+from xml.dom.minidom import parse
+from xml.parsers.expat import ExpatError
+
+import common
+
+# Global symbol table (yuck)
+TheAddressTable = None
+
+# These are regexps that define functions (using C++ mangled names)
+# we don't want to see in stack traces while pretty printing
+# or generating suppressions.
+# Just stop printing the stack/suppression frames when the current one
+# matches any of these.
+_BORING_CALLERS = common.BoringCallers(mangled=True, use_re_wildcards=True)
+
+def getTextOf(top_node, name):
+ ''' Returns all text in all DOM nodes with a certain |name| that are children
+ of |top_node|.
+ '''
+
+ text = ""
+ for nodes_named in top_node.getElementsByTagName(name):
+ text += "".join([node.data for node in nodes_named.childNodes
+ if node.nodeType == node.TEXT_NODE])
+ return text
+
+def getCDATAOf(top_node, name):
+ ''' Returns all CDATA in all DOM nodes with a certain |name| that are children
+ of |top_node|.
+ '''
+
+ text = ""
+ for nodes_named in top_node.getElementsByTagName(name):
+ text += "".join([node.data for node in nodes_named.childNodes
+ if node.nodeType == node.CDATA_SECTION_NODE])
+ if (text == ""):
+ return None
+ return text
+
+def shortenFilePath(source_dir, directory):
+ '''Returns a string with the string prefix |source_dir| removed from
+ |directory|.'''
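+ # For example, "/b/build/src/base/foo.cc" is shortened to "base/foo.cc".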
+ prefixes_to_cut = ["build/src/", "valgrind/coregrind/", "out/Release/../../"]
+
+ if source_dir:
+ prefixes_to_cut.append(source_dir)
+
+ for p in prefixes_to_cut:
+ index = directory.rfind(p)
+ if index != -1:
+ directory = directory[index + len(p):]
+
+ return directory
+
+# Constants that give real names to the abbreviations in valgrind XML output.
+INSTRUCTION_POINTER = "ip"
+OBJECT_FILE = "obj"
+FUNCTION_NAME = "fn"
+SRC_FILE_DIR = "dir"
+SRC_FILE_NAME = "file"
+SRC_LINE = "line"
+
+def gatherFrames(node, source_dir):
+ frames = []
+ for frame in node.getElementsByTagName("frame"):
+ frame_dict = {
+ INSTRUCTION_POINTER : getTextOf(frame, INSTRUCTION_POINTER),
+ OBJECT_FILE : getTextOf(frame, OBJECT_FILE),
+ FUNCTION_NAME : getTextOf(frame, FUNCTION_NAME),
+ SRC_FILE_DIR : shortenFilePath(
+ source_dir, getTextOf(frame, SRC_FILE_DIR)),
+ SRC_FILE_NAME : getTextOf(frame, SRC_FILE_NAME),
+ SRC_LINE : getTextOf(frame, SRC_LINE)
+ }
+
+ # Ignore this frame and all the following if it's a "boring" function.
+ enough_frames = False
+ for regexp in _BORING_CALLERS:
+ if re.match("^%s$" % regexp, frame_dict[FUNCTION_NAME]):
+ enough_frames = True
+ break
+ if enough_frames:
+ break
+
+ frames += [frame_dict]
+
+ global TheAddressTable
+ if TheAddressTable != None and frame_dict[SRC_LINE] == "":
+ # Try using gdb
+ TheAddressTable.Add(frame_dict[OBJECT_FILE],
+ frame_dict[INSTRUCTION_POINTER])
+ return frames
+
+class ValgrindError:
+ ''' Takes a <DOM Element: error> node and reads all the data from it. A
+ ValgrindError is immutable and is hashed on its kind and backtrace frames.
+ '''
+
+ def __init__(self, source_dir, error_node, commandline, testcase):
+ ''' Copies all the relevant information out of the DOM and into object
+ properties.
+
+ Args:
+ error_node: The <error></error> DOM node we're extracting from.
+ source_dir: Prefix that should be stripped from the <dir> node.
+ commandline: The command that was run under valgrind
+ testcase: The test case name, if known.
+ '''
+
+ # Valgrind errors contain one <what><stack> pair, plus an optional
+ # <auxwhat><stack> pair, plus an optional <origin><what><stack></origin>,
+ # plus (since 3.5.0) a <suppression></suppression> pair.
+ # (Origin is nicely enclosed; too bad the other two aren't.)
+ # The most common way to see all three in one report is
+ # a syscall with a parameter that points to uninitialized memory, e.g.
+ # Format:
+ # <error>
+ # <unique>0x6d</unique>
+ # <tid>1</tid>
+ # <kind>SyscallParam</kind>
+ # <what>Syscall param write(buf) points to uninitialised byte(s)</what>
+ # <stack>
+ # <frame>
+ # ...
+ # </frame>
+ # </stack>
+ # <auxwhat>Address 0x5c9af4f is 7 bytes inside a block of ...</auxwhat>
+ # <stack>
+ # <frame>
+ # ...
+ # </frame>
+ # </stack>
+ # <origin>
+ # <what>Uninitialised value was created by a heap allocation</what>
+ # <stack>
+ # <frame>
+ # ...
+ # </frame>
+ # </stack>
+ # </origin>
+ # <suppression>
+ # <sname>insert_a_suppression_name_here</sname>
+ # <skind>Memcheck:Param</skind>
+ # <skaux>write(buf)</skaux>
+ # <sframe> <fun>__write_nocancel</fun> </sframe>
+ # ...
+ # <sframe> <fun>main</fun> </sframe>
+ # <rawtext>
+ # <![CDATA[
+ # {
+ # <insert_a_suppression_name_here>
+ # Memcheck:Param
+ # write(buf)
+ # fun:__write_nocancel
+ # ...
+ # fun:main
+ # }
+ # ]]>
+ # </rawtext>
+ # </suppression>
+ # </error>
+ #
+ # Each frame looks like this:
+ # <frame>
+ # <ip>0x83751BC</ip>
+ # <obj>/data/dkegel/chrome-build/src/out/Release/base_unittests</obj>
+ # <fn>_ZN7testing8internal12TestInfoImpl7RunTestEPNS_8TestInfoE</fn>
+ # <dir>/data/dkegel/chrome-build/src/testing/gtest/src</dir>
+ # <file>gtest-internal-inl.h</file>
+ # <line>655</line>
+ # </frame>
+ # although the dir, file, and line elements are missing if there is
+ # no debug info.
+
+ self._kind = getTextOf(error_node, "kind")
+ self._backtraces = []
+ self._suppression = None
+ self._commandline = commandline
+ self._testcase = testcase
+ self._additional = []
+
+ # Iterate through the nodes, parsing <what|auxwhat><stack> pairs.
+ description = None
+ for node in error_node.childNodes:
+ if node.localName == "what" or node.localName == "auxwhat":
+ description = "".join([n.data for n in node.childNodes
+ if n.nodeType == n.TEXT_NODE])
+ elif node.localName == "xwhat":
+ description = getTextOf(node, "text")
+ elif node.localName == "stack":
+ assert description
+ self._backtraces.append([description, gatherFrames(node, source_dir)])
+ description = None
+ elif node.localName == "origin":
+ description = getTextOf(node, "what")
+ stack = node.getElementsByTagName("stack")[0]
+ frames = gatherFrames(stack, source_dir)
+ self._backtraces.append([description, frames])
+ description = None
+ stack = None
+ frames = None
+ elif description and node.localName != None:
+ # The latest description has no stack, e.g. "Address 0x28 is unknown"
+ self._additional.append(description)
+ description = None
+
+ if node.localName == "suppression":
+ self._suppression = getCDATAOf(node, "rawtext")
+
+ def __str__(self):
+ ''' Pretty print the type and backtrace(s) of this specific error,
+ including suppression (which is just a mangled backtrace).'''
+ output = ""
+ if (self._commandline):
+ output += self._commandline + "\n"
+
+ output += self._kind + "\n"
+ for backtrace in self._backtraces:
+ output += backtrace[0] + "\n"
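+ # Demangle every frame name in this backtrace with a single c++filt run.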
+ filter = subprocess.Popen("c++filt -n", stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=True,
+ close_fds=True)
+ buf = ""
+ for frame in backtrace[1]:
+ buf += (frame[FUNCTION_NAME] or frame[INSTRUCTION_POINTER]) + "\n"
+ (stdoutbuf, stderrbuf) = filter.communicate(buf.encode('latin-1'))
+ demangled_names = stdoutbuf.split("\n")
+
+ i = 0
+ for frame in backtrace[1]:
+ output += (" " + demangled_names[i])
+ i = i + 1
+
+ global TheAddressTable
+ if TheAddressTable != None and frame[SRC_FILE_DIR] == "":
+ # Try using gdb
+ foo = TheAddressTable.GetFileLine(frame[OBJECT_FILE],
+ frame[INSTRUCTION_POINTER])
+ if foo[0] != None:
+ output += (" (" + foo[0] + ":" + foo[1] + ")")
+ elif frame[SRC_FILE_DIR] != "":
+ output += (" (" + frame[SRC_FILE_DIR] + "/" + frame[SRC_FILE_NAME] +
+ ":" + frame[SRC_LINE] + ")")
+ else:
+ output += " (" + frame[OBJECT_FILE] + ")"
+ output += "\n"
+
+ for additional in self._additional:
+ output += additional + "\n"
+
+ assert self._suppression != None, "Your Valgrind doesn't generate " \
+ "suppressions - is it too old?"
+
+ if self._testcase:
+ output += "The report came from the `%s` test.\n" % self._testcase
+ output += "Suppression (error hash=#%016X#):\n" % self.ErrorHash()
+ output += (" For more info on using suppressions see "
+ "http://dev.chromium.org/developers/tree-sheriffs/sheriff-details-chromium/memory-sheriff#TOC-Suppressing-memory-reports")
+
+ # Widen suppression slightly to make portable between mac and linux
+ # TODO(timurrrr): Oops, these transformations should happen
+ # BEFORE calculating the hash!
+ supp = self._suppression
+ supp = supp.replace("fun:_Znwj", "fun:_Znw*")
+ supp = supp.replace("fun:_Znwm", "fun:_Znw*")
+ supp = supp.replace("fun:_Znaj", "fun:_Zna*")
+ supp = supp.replace("fun:_Znam", "fun:_Zna*")
+
+ # Make suppressions even less platform-dependent.
+ for sz in [1, 2, 4, 8]:
+ supp = supp.replace("Memcheck:Addr%d" % sz, "Memcheck:Unaddressable")
+ supp = supp.replace("Memcheck:Value%d" % sz, "Memcheck:Uninitialized")
+ supp = supp.replace("Memcheck:Cond", "Memcheck:Uninitialized")
+
+ # Split into lines so we can enforce length limits
+ supplines = supp.split("\n")
+ supp = None # to avoid re-use
+
+ # Truncate at line 26 (VG_MAX_SUPP_CALLERS plus 2 for name and type)
+ # or at the first 'boring' caller.
+ # (https://bugs.kde.org/show_bug.cgi?id=199468 proposes raising
+ # VG_MAX_SUPP_CALLERS, but we're probably fine with it as is.)
+ newlen = min(26, len(supplines))
+
+ # Drop boring frames and all the following.
+ enough_frames = False
+ for frameno in range(newlen):
+ for boring_caller in _BORING_CALLERS:
+ if re.match("^ +fun:%s$" % boring_caller, supplines[frameno]):
+ newlen = frameno
+ enough_frames = True
+ break
+ if enough_frames:
+ break
+ if (len(supplines) > newlen):
+ supplines = supplines[0:newlen]
+ supplines.append("}")
+
+ for frame in range(len(supplines)):
+ # Replace the always-changing anonymous namespace prefix with "*".
+ m = re.match("( +fun:)_ZN.*_GLOBAL__N_.*\.cc_" +
+ "[0-9a-fA-F]{8}_[0-9a-fA-F]{8}(.*)",
+ supplines[frame])
+ if m:
+ supplines[frame] = "*".join(m.groups())
+
+ output += "\n".join(supplines) + "\n"
+
+ return output
+
+ def UniqueString(self):
+ ''' String to use for object identity. Don't print this, use str(obj)
+ instead.'''
+ rep = self._kind + " "
+ for backtrace in self._backtraces:
+ for frame in backtrace[1]:
+ rep += frame[FUNCTION_NAME]
+
+ if frame[SRC_FILE_DIR] != "":
+ rep += frame[SRC_FILE_DIR] + "/" + frame[SRC_FILE_NAME]
+ else:
+ rep += frame[OBJECT_FILE]
+
+ return rep
+
+ # This is a device-independent hash identifying the suppression.
+ # By printing out this hash we can find duplicate reports between tests and
+ # different shards running on multiple buildbots
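+  # (This is the value printed as "hash=#%016X#" in __str__ above: the first
+  # 16 hex digits of the MD5 of UniqueString(), rendered as uppercase hex.)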
+ def ErrorHash(self):
+ return int(hashlib.md5(self.UniqueString()).hexdigest()[:16], 16)
+
+ def __hash__(self):
+ return hash(self.UniqueString())
+ def __eq__(self, rhs):
+ return self.UniqueString() == rhs
+
+def log_is_finished(f, force_finish):
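+  '''Returns True if the Valgrind XML log in |f| looks complete.
+
+  If |force_finish| is true and the log stops right after a safe closing tag
+  (</error>, </errorcounts> or </status>), a closing </valgrindoutput> tag is
+  appended so that the XML becomes parseable.'''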
+ f.seek(0)
+ prev_line = ""
+ while True:
+ line = f.readline()
+ if line == "":
+ if not force_finish:
+ return False
+ # Okay, the log is not finished but we can make it up to be parseable:
+ if prev_line.strip() in ["</error>", "</errorcounts>", "</status>"]:
+ f.write("</valgrindoutput>\n")
+ return True
+ return False
+ if '</valgrindoutput>' in line:
+ # Valgrind often has garbage after </valgrindoutput> upon crash.
+ f.truncate()
+ return True
+ prev_line = line
+
+class MemcheckAnalyzer:
+ ''' Given a set of Valgrind XML files, parse all the errors out of them,
+ unique them and output the results.'''
+
+ SANITY_TEST_SUPPRESSIONS = {
+ "Memcheck sanity test 01 (memory leak).": 1,
+ "Memcheck sanity test 02 (malloc/read left).": 1,
+ "Memcheck sanity test 03 (malloc/read right).": 1,
+ "Memcheck sanity test 04 (malloc/write left).": 1,
+ "Memcheck sanity test 05 (malloc/write right).": 1,
+ "Memcheck sanity test 06 (new/read left).": 1,
+ "Memcheck sanity test 07 (new/read right).": 1,
+ "Memcheck sanity test 08 (new/write left).": 1,
+ "Memcheck sanity test 09 (new/write right).": 1,
+ "Memcheck sanity test 10 (write after free).": 1,
+ "Memcheck sanity test 11 (write after delete).": 1,
+ "Memcheck sanity test 12 (array deleted without []).": 1,
+ "Memcheck sanity test 13 (single element deleted with []).": 1,
+ "Memcheck sanity test 14 (malloc/read uninit).": 1,
+ "Memcheck sanity test 15 (new/read uninit).": 1,
+ }
+
+ # Max time to wait for memcheck logs to complete.
+ LOG_COMPLETION_TIMEOUT = 180.0
+
+ def __init__(self, source_dir, show_all_leaks=False, use_gdb=False):
+ '''Create a parser for Memcheck logs.
+
+ Args:
+ source_dir: Path to top of source tree for this build
+ show_all_leaks: Whether to show even less important leaks
+ use_gdb: Whether to use gdb to resolve source filenames and line numbers
+ in the report stacktraces
+ '''
+ self._source_dir = source_dir
+ self._show_all_leaks = show_all_leaks
+ self._use_gdb = use_gdb
+
+ # Contains the set of unique errors
+ self._errors = set()
+
+ # Contains the time when the we started analyzing the first log file.
+ # This variable is used to skip incomplete logs after some timeout.
+ self._analyze_start_time = None
+
+
+ def Report(self, files, testcase, check_sanity=False):
+ '''Reads in a set of files and prints Memcheck report.
+
+ Args:
+      files: A list of filenames.
+      testcase: Name of the test the reports belong to (may be None).
+      check_sanity: If True, check that the SANITY_TEST_SUPPRESSIONS were hit.
+ '''
+ # Beyond the detailed errors parsed by ValgrindError above,
+    # the XML file contains records describing suppressions that were used:
+ # <suppcounts>
+ # <pair>
+ # <count>28</count>
+ # <name>pango_font_leak_todo</name>
+ # </pair>
+ # <pair>
+ # <count>378</count>
+ # <name>bug_13243</name>
+ # </pair>
+    # </suppcounts>
+ # Collect these and print them at the end.
+ #
+ # With our patch for https://bugs.kde.org/show_bug.cgi?id=205000 in,
+ # the file also includes records of the form
+ # <load_obj><obj>/usr/lib/libgcc_s.1.dylib</obj><ip>0x27000</ip></load_obj>
+ # giving the filename and load address of each binary that was mapped
+ # into the process.
+
+ global TheAddressTable
+ if self._use_gdb:
+ TheAddressTable = gdb_helper.AddressTable()
+ else:
+ TheAddressTable = None
+ cur_report_errors = set()
+ suppcounts = defaultdict(int)
+ badfiles = set()
+
+ if self._analyze_start_time == None:
+ self._analyze_start_time = time.time()
+ start_time = self._analyze_start_time
+
+ parse_failed = False
+ for file in files:
+ # Wait up to three minutes for valgrind to finish writing all files,
+ # but after that, just skip incomplete files and warn.
+ f = open(file, "r+")
+ pid = re.match(".*\.([0-9]+)$", file)
+ if pid:
+ pid = pid.groups()[0]
+ found = False
+ running = True
+ firstrun = True
+ skip = False
+ origsize = os.path.getsize(file)
+ while (running and not found and not skip and
+ (firstrun or
+ ((time.time() - start_time) < self.LOG_COMPLETION_TIMEOUT))):
+ firstrun = False
+ f.seek(0)
+ if pid:
+ # Make sure the process is still running so we don't wait for
+ # 3 minutes if it was killed. See http://crbug.com/17453
+ ps_out = subprocess.Popen("ps p %s" % pid, shell=True,
+ stdout=subprocess.PIPE).stdout
+ if len(ps_out.readlines()) < 2:
+ running = False
+ else:
+ skip = True
+ running = False
+ found = log_is_finished(f, False)
+ if not running and not found:
+ logging.warn("Valgrind process PID = %s is not running but its "
+ "XML log has not been finished correctly.\n"
+                     "Trying to fix it up by adding the missing closing "
+                     "tags manually." % pid)
+ found = log_is_finished(f, not running)
+ if running and not found:
+ time.sleep(1)
+ f.close()
+ if not found:
+ badfiles.add(file)
+ else:
+ newsize = os.path.getsize(file)
+ if origsize > newsize+1:
+ logging.warn(str(origsize - newsize) +
+ " bytes of junk were after </valgrindoutput> in %s!" %
+ file)
+ try:
+ parsed_file = parse(file);
+ except ExpatError, e:
+ parse_failed = True
+ logging.warn("could not parse %s: %s" % (file, e))
+ lineno = e.lineno - 1
+ context_lines = 5
+ context_start = max(0, lineno - context_lines)
+ context_end = lineno + context_lines + 1
+ context_file = open(file, "r")
+ for i in range(0, context_start):
+ context_file.readline()
+ for i in range(context_start, context_end):
+ context_data = context_file.readline().rstrip()
+ if i != lineno:
+ logging.warn(" %s" % context_data)
+ else:
+ logging.warn("> %s" % context_data)
+ context_file.close()
+ continue
+ if TheAddressTable != None:
+ load_objs = parsed_file.getElementsByTagName("load_obj")
+ for load_obj in load_objs:
+ obj = getTextOf(load_obj, "obj")
+ ip = getTextOf(load_obj, "ip")
+ TheAddressTable.AddBinaryAt(obj, ip)
+
+ commandline = None
+ preamble = parsed_file.getElementsByTagName("preamble")[0];
+ for node in preamble.getElementsByTagName("line"):
+ if node.localName == "line":
+ for x in node.childNodes:
+ if x.nodeType == node.TEXT_NODE and "Command" in x.data:
+ commandline = x.data
+ break
+
+ raw_errors = parsed_file.getElementsByTagName("error")
+ for raw_error in raw_errors:
+ # Ignore "possible" leaks for now by default.
+ if (self._show_all_leaks or
+ getTextOf(raw_error, "kind") != "Leak_PossiblyLost"):
+ error = ValgrindError(self._source_dir,
+ raw_error, commandline, testcase)
+ if error not in cur_report_errors:
+            # We haven't seen this error during this report yet...
+ if error in self._errors:
+ # ... but we saw it in earlier reports, e.g. previous UI test
+ cur_report_errors.add("This error was already printed in "
+ "some other test, see 'hash=#%016X#'" % \
+ error.ErrorHash())
+ else:
+              # ... and we haven't seen it in other tests either
+ self._errors.add(error)
+ cur_report_errors.add(error)
+
+ suppcountlist = parsed_file.getElementsByTagName("suppcounts")
+ if len(suppcountlist) > 0:
+ suppcountlist = suppcountlist[0]
+ for node in suppcountlist.getElementsByTagName("pair"):
+ count = getTextOf(node, "count");
+ name = getTextOf(node, "name");
+ suppcounts[name] += int(count)
+
+ if len(badfiles) > 0:
+ logging.warn("valgrind didn't finish writing %d files?!" % len(badfiles))
+ for file in badfiles:
+ logging.warn("Last 20 lines of %s :" % file)
+ os.system("tail -n 20 '%s' 1>&2" % file)
+
+ if parse_failed:
+ logging.error("FAIL! Couldn't parse Valgrind output file")
+ return -2
+
+ common.PrintUsedSuppressionsList(suppcounts)
+
+ retcode = 0
+ if cur_report_errors:
+ logging.error("FAIL! There were %s errors: " % len(cur_report_errors))
+
+ if TheAddressTable != None:
+ TheAddressTable.ResolveAll()
+
+ for error in cur_report_errors:
+ logging.error(error)
+
+ retcode = -1
+
+ # Report tool's insanity even if there were errors.
+ if check_sanity:
+      # Copy so that repeated calls don't mutate the class-level dict.
+      remaining_sanity_supp = dict(MemcheckAnalyzer.SANITY_TEST_SUPPRESSIONS)
+ for (name, count) in suppcounts.iteritems():
+ # Workaround for http://crbug.com/334074
+ if (name in remaining_sanity_supp and
+ remaining_sanity_supp[name] <= count):
+ del remaining_sanity_supp[name]
+ if remaining_sanity_supp:
+ logging.error("FAIL! Sanity check failed!")
+ logging.info("The following test errors were not handled: ")
+ for (name, count) in remaining_sanity_supp.iteritems():
+ logging.info(" * %dx %s" % (count, name))
+ retcode = -3
+
+ if retcode != 0:
+ return retcode
+
+ logging.info("PASS! No errors found!")
+ return 0
+
+
+def _main():
+ '''For testing only. The MemcheckAnalyzer class should be imported instead.'''
+ parser = optparse.OptionParser("usage: %prog [options] <files to analyze>")
+ parser.add_option("", "--source-dir",
+                    help="path to top of source tree for this build "
+                         "(used to normalize source paths in baseline)")
+
+ (options, args) = parser.parse_args()
+ if len(args) == 0:
+ parser.error("no filename specified")
+ filenames = args
+
+ analyzer = MemcheckAnalyzer(options.source_dir, use_gdb=True)
+ return analyzer.Report(filenames, None)
+
+
+if __name__ == "__main__":
+ sys.exit(_main())
diff --git a/tools/valgrind/regrind.sh b/tools/valgrind/regrind.sh
new file mode 100755
index 0000000..0f90ba7
--- /dev/null
+++ b/tools/valgrind/regrind.sh
@@ -0,0 +1,138 @@
+#!/bin/sh
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Scrape errors from the valgrind bots, reproduce them locally,
+# save logs as regrind-TESTNAME.log, and display any errors found.
+# Also save files regrind-failed.txt listing failed tests,
+# and regrind-failed-map.txt showing which bot URLs have which failed tests
+# (handy when filing bugs).
+#
+# Only scrapes linux layout bot at the moment.
+# TODO: handle layout tests that don't have obvious path to test file
+# TODO: extend script to handle more kinds of errors and more tests
+
+# where the valgrind layout bot results live
+LAYOUT_URL="http://build.chromium.org/p/chromium.memory.fyi/builders/Webkit%20Linux%20(valgrind%20layout)"
+# how many builds back to check
+LAYOUT_COUNT=250
+
+# regexp to match valgrind errors
+PATTERN="are definitely|uninitialised|Unhandled exception|\
+Invalid read|Invalid write|Invalid free|Source and desti|Mismatched free|\
+unaddressable byte|vex x86|the 'impossible' happened|\
+valgrind:.*: Assertion.*failed|VALGRIND INTERNAL ERROR"
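+# (The pattern is meant to catch typical report lines such as
+# "... are definitely lost in loss record ..." or "Invalid read of size 4";
+# the examples here are illustrative.)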
+
+usage() {
+ echo "Usage: regrind.sh [--noscrape][--norepro][--keep]"
+ echo "--noscrape: don't scrape bots, just use old regrind-failed.txt"
+ echo "--norepro: don't reproduce locally"
+ echo "--keep: keep temp files"
+ exit 1
+}
+
+# Given a log on stdin, list all the tests that failed in that log.
+layout_list_failed_tests() {
+ grep "Command:.*LayoutTests" |
+ sed 's/<.*>//' |
+ sed 's/.*LayoutTests/LayoutTests/' |
+ sort -u |
+ tr -d '\015'
+}
+
+# Generate a list of failed tests in regrind-failed.txt by scraping bot.
+# Scrape the most recent runs first, so an interrupted run still leaves fresh-ish data.
+scrape_layout() {
+ rm -f regrind-*.tmp* regrind-failed.txt regrind-failed-map.txt
+ touch regrind-failed.txt
+
+ # First, grab the number of the latest complete build.
+ wget -q -O regrind-builds.html "$LAYOUT_URL"
+ latest=`grep "<li><font .*" < regrind-builds.html | head -1 | sed 's/.*#//;s/<.*//'`
+
+ echo "Fetching $LAYOUT_COUNT logs from bot"
+ # Scrape the desired number of runs (150 is about one cycle)
+ first=`expr $latest - $LAYOUT_COUNT`
+ i=$latest
+ while test $i -ge $first
+ do
+ url="$LAYOUT_URL/builds/$i/steps/valgrind%20test:%20layout/logs/stdio"
+ wget -q -O regrind-$i.tmp "$url"
+ # Did any tests fail in this file?
+ layout_list_failed_tests < regrind-$i.tmp > regrind-$i.tmp.failed
+ if test -s regrind-$i.tmp.failed
+ then
+ # Yes. Log them to stdout,
+ echo "$url"
+ cat regrind-$i.tmp.failed
+ # to the table regrind-failed-map.txt,
+ cat regrind-$i.tmp.failed | sed "s,^,$url ," >> regrind-failed-map.txt
+ # and, if not already there, to regrind-failed.txt.
+ for test in `cat regrind-$i.tmp.failed`
+ do
+ fgrep "$test" regrind-failed.txt > /dev/null 2>&1 || echo "$test" >> regrind-failed.txt
+ done
+ else
+ rm regrind-$i.tmp.failed
+ fi
+ # Sleep 1/3 sec per fetch
+ case $i in
+ *[036]) sleep 1;;
+ esac
+ i=`expr $i - 1`
+ done
+
+ # Finally, munge the logs to identify tests that probably failed.
+ sh c.sh -l regrind-*.tmp > regrind-errfiles.txt
+ cat `cat regrind-errfiles.txt` | layout_list_failed_tests > regrind-failed.txt
+}
+
+# Run the tests identified in regrind-failed.txt locally under valgrind.
+# Save logs in regrind-$TESTNAME.log.
+repro_layout() {
+ echo Running `wc -l < regrind-failed.txt` layout tests.
+ for test in `cat regrind-failed.txt`
+ do
+ logname="`echo $test | tr / _`"
+ echo "sh tools/valgrind/valgrind_webkit_tests.sh $test"
+ sh tools/valgrind/valgrind_webkit_tests.sh "$test" > regrind-"$logname".log 2>&1
+ egrep "$PATTERN" < regrind-"$logname".log | sed 's/==.*==//'
+ done
+}
+
+do_repro=1
+do_scrape=1
+do_cleanup=1
+while test ! -z "$1"
+do
+ case "$1" in
+ --noscrape) do_scrape=0;;
+ --norepro) do_repro=0;;
+ --keep) do_cleanup=0;;
+ *) usage;;
+ esac
+ shift
+done
+
+echo "WARNING: This script is not supported and may be out of date"
+
+if test $do_scrape = 0 && test $do_repro = 0
+then
+ usage
+fi
+
+if test $do_scrape = 1
+then
+ scrape_layout
+fi
+
+if test $do_repro = 1
+then
+ repro_layout
+fi
+
+if test $do_cleanup = 1
+then
+ rm -f regrind-errfiles.txt regrind-*.tmp*
+fi
diff --git a/tools/valgrind/reliability/url_list.txt b/tools/valgrind/reliability/url_list.txt
new file mode 100644
index 0000000..ac53122
--- /dev/null
+++ b/tools/valgrind/reliability/url_list.txt
@@ -0,0 +1,11 @@
+www.google.com
+maps.google.com
+news.google.com
+www.youtube.com
+build.chromium.org/p/chromium/waterfall
+build.chromium.org/p/chromium.memory/console
+build.chromium.org/f/chromium/perf/dashboard/overview.html
+www.slashdot.org
+www.ibanez.co.jp/japan/index.html
+www.bbc.co.uk/arabic/
+www.uni.edu/becker/chinese2.html
diff --git a/tools/valgrind/scan-build.py b/tools/valgrind/scan-build.py
new file mode 100755
index 0000000..b58b6cc
--- /dev/null
+++ b/tools/valgrind/scan-build.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import errno
+import os
+import re
+import sys
+import urllib
+import urllib2
+
+# Where all the data lives.
+ROOT_URL = "http://build.chromium.org/p/chromium.memory.fyi/builders"
+
+# TODO(groby) - support multi-line search from the command line. Useful when
+# scanning for classes of failures, see below.
+SEARCH_STRING = """<p class=\"failure result\">
+Failed memory test: content
+</p>"""
+
+# Location of the log cache.
+CACHE_DIR = "buildlogs.tmp"
+
+# If we don't find anything after searching |CUTOFF| logs, we're probably done.
+CUTOFF = 100
+
+def EnsurePath(path):
+  """Makes sure |path| exists, creating it if it doesn't."""
+ try:
+ os.makedirs(path)
+ except OSError as exception:
+ if exception.errno != errno.EEXIST:
+ raise
+
+
+class Cache(object):
+ def __init__(self, root_dir):
+ self._root_dir = os.path.abspath(root_dir)
+
+ def _LocalName(self, name):
+ """If name is a relative path, treat it as relative to cache root.
+ If it is absolute and under cache root, pass it through.
+ Otherwise, raise error.
+ """
+ if os.path.isabs(name):
+ assert os.path.commonprefix([name, self._root_dir]) == self._root_dir
+ else:
+ name = os.path.join(self._root_dir, name)
+ return name
+
+ def _FetchLocal(self, local_name):
+ local_name = self._LocalName(local_name)
+ EnsurePath(os.path.dirname(local_name))
+ if os.path.exists(local_name):
+ f = open(local_name, 'r')
+ return f.readlines();
+ return None
+
+ def _FetchRemote(self, remote_name):
+ try:
+ response = urllib2.urlopen(remote_name)
+ except:
+ print "Could not fetch", remote_name
+ raise
+ return response.read()
+
+ def Update(self, local_name, remote_name):
+ local_name = self._LocalName(local_name)
+ EnsurePath(os.path.dirname(local_name))
+ blob = self._FetchRemote(remote_name)
+ f = open(local_name, "w")
+ f.write(blob)
+ return blob.splitlines()
+
+ def FetchData(self, local_name, remote_name):
+ result = self._FetchLocal(local_name)
+ if result:
+ return result
+ # If we get here, the local cache does not exist yet. Fetch, and store.
+ return self.Update(local_name, remote_name)
+
+
+class Builder(object):
+ def __init__(self, waterfall, name):
+ self._name = name
+ self._waterfall = waterfall
+
+ def Name(self):
+ return self._name
+
+ def LatestBuild(self):
+ return self._waterfall.GetLatestBuild(self._name)
+
+ def GetBuildPath(self, build_num):
+ return "%s/%s/builds/%d" % (
+ self._waterfall._root_url, urllib.quote(self._name), build_num)
+
+ def _FetchBuildLog(self, build_num):
+ local_build_path = "builds/%s" % self._name
+ local_build_file = os.path.join(local_build_path, "%d.log" % build_num)
+ return self._waterfall._cache.FetchData(local_build_file,
+ self.GetBuildPath(build_num))
+
+ def _CheckLog(self, build_num, tester):
+ log_lines = self._FetchBuildLog(build_num)
+ return any(tester(line) for line in log_lines)
+
+ def ScanLogs(self, tester):
+ occurrences = []
+ build = self.LatestBuild()
+ no_results = 0
+ while build != 0 and no_results < CUTOFF:
+ if self._CheckLog(build, tester):
+ occurrences.append(build)
+ else:
+ no_results = no_results + 1
+ build = build - 1
+ return occurrences
+
+
+class Waterfall(object):
+ def __init__(self, root_url, cache_dir):
+ self._root_url = root_url
+ self._builders = {}
+ self._top_revision = {}
+ self._cache = Cache(cache_dir)
+
+ def Builders(self):
+ return self._builders.values()
+
+ def Update(self):
+ self._cache.Update("builders", self._root_url)
+ self.FetchInfo()
+
+ def FetchInfo(self):
+ if self._top_revision:
+ return
+
+ html = self._cache.FetchData("builders", self._root_url)
+
+ """ Search for both builders and latest build number in HTML
+ <td class="box"><a href="builders/<builder-name>"> identifies a builder
+ <a href="builders/<builder-name>/builds/<build-num>"> is the latest build.
+ """
+ box_matcher = re.compile('.*a href[^>]*>([^<]*)\<')
+ build_matcher = re.compile('.*a href=\"builders/(.*)/builds/([0-9]+)\".*')
+ last_builder = ""
+ for line in html:
+ if 'a href="builders/' in line:
+ if 'td class="box"' in line:
+ last_builder = box_matcher.match(line).group(1)
+ self._builders[last_builder] = Builder(self, last_builder)
+ else:
+ result = build_matcher.match(line)
+ builder = result.group(1)
+ assert builder == urllib.quote(last_builder)
+ self._top_revision[last_builder] = int(result.group(2))
+
+ def GetLatestBuild(self, name):
+ self.FetchInfo()
+ assert self._top_revision
+ return self._top_revision[name]
+
+
+class MultiLineChange(object):
+ def __init__(self, lines):
+ self._tracked_lines = lines
+ self._current = 0
+
+ def __call__(self, line):
+ """ Test a single line against multi-line change.
+
+ If it matches the currently active line, advance one line.
+ If the current line is the last line, report a match.
+ """
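+    # Illustrative example: MultiLineChange(["foo", "bar"]) only returns True
+    # on a line containing "bar" that immediately follows a line containing
+    # "foo"; any other sequence resets the matcher.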
+ if self._tracked_lines[self._current] in line:
+ self._current = self._current + 1
+ if self._current == len(self._tracked_lines):
+ self._current = 0
+ return True
+ else:
+ self._current = 0
+ return False
+
+
+def main(argv):
+ # Create argument parser.
+ parser = argparse.ArgumentParser()
+ commands = parser.add_mutually_exclusive_group(required=True)
+ commands.add_argument("--update", action='store_true')
+ commands.add_argument("--find", metavar='search term')
+ args = parser.parse_args()
+
+ path = os.path.abspath(os.path.dirname(argv[0]))
+ cache_path = os.path.join(path, CACHE_DIR)
+
+ fyi = Waterfall(ROOT_URL, cache_path)
+
+ if args.update:
+ fyi.Update()
+ for builder in fyi.Builders():
+ print "Updating", builder.Name()
+ builder.ScanLogs(lambda x:False)
+
+ if args.find:
+ tester = MultiLineChange(args.find.splitlines())
+ fyi.FetchInfo()
+
+ print "SCANNING FOR ", args.find
+ for builder in fyi.Builders():
+ print "Scanning", builder.Name()
+ occurrences = builder.ScanLogs(tester)
+ if occurrences:
+ min_build = min(occurrences)
+ path = builder.GetBuildPath(min_build)
+ print "Earliest occurrence in build %d" % min_build
+ print "Latest occurrence in build %d" % max(occurrences)
+ print "Latest build: %d" % builder.LatestBuild()
+ print path
+ print "%d total" % len(occurrences)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
+
diff --git a/tools/valgrind/suppressions.py b/tools/valgrind/suppressions.py
new file mode 100755
index 0000000..c655e12
--- /dev/null
+++ b/tools/valgrind/suppressions.py
@@ -0,0 +1,989 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# suppressions.py
+
+"""Post-process Valgrind suppression matcher.
+
+Suppressions are defined as follows:
+
+# optional one-line comments anywhere in the suppressions file.
+{
+ <Short description of the error>
+ Toolname:Errortype
+ fun:function_name
+ obj:object_filename
+ fun:wildcarded_fun*_name
+ # an ellipsis wildcards zero or more functions in a stack.
+ ...
+ fun:some_other_function_name
+}
+
+If run from the command line, suppressions.py does a self-test
+of the Suppression class.
+"""
+
+import os
+import re
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__),
+ '..', 'python', 'google'))
+import path_utils
+
+
+ELLIPSIS = '...'
+
+
+def GetSuppressions():
+ suppressions_root = path_utils.ScriptDir()
+ JOIN = os.path.join
+
+ result = {}
+
+ supp_filename = JOIN(suppressions_root, "memcheck", "suppressions.txt")
+ vg_common = ReadSuppressionsFromFile(supp_filename)
+ supp_filename = JOIN(suppressions_root, "tsan", "suppressions.txt")
+ tsan_common = ReadSuppressionsFromFile(supp_filename)
+ result['common_suppressions'] = vg_common + tsan_common
+
+ supp_filename = JOIN(suppressions_root, "memcheck", "suppressions_linux.txt")
+ vg_linux = ReadSuppressionsFromFile(supp_filename)
+ supp_filename = JOIN(suppressions_root, "tsan", "suppressions_linux.txt")
+ tsan_linux = ReadSuppressionsFromFile(supp_filename)
+ result['linux_suppressions'] = vg_linux + tsan_linux
+
+ supp_filename = JOIN(suppressions_root, "memcheck", "suppressions_mac.txt")
+ vg_mac = ReadSuppressionsFromFile(supp_filename)
+ supp_filename = JOIN(suppressions_root, "tsan", "suppressions_mac.txt")
+ tsan_mac = ReadSuppressionsFromFile(supp_filename)
+ result['mac_suppressions'] = vg_mac + tsan_mac
+
+ supp_filename = JOIN(suppressions_root, "tsan", "suppressions_win32.txt")
+ tsan_win = ReadSuppressionsFromFile(supp_filename)
+ result['win_suppressions'] = tsan_win
+
+ supp_filename = JOIN(suppressions_root, "drmemory", "suppressions.txt")
+ result['drmem_suppressions'] = ReadSuppressionsFromFile(supp_filename)
+ supp_filename = JOIN(suppressions_root, "drmemory", "suppressions_full.txt")
+ result['drmem_full_suppressions'] = ReadSuppressionsFromFile(supp_filename)
+
+ return result
+
+
+def GlobToRegex(glob_pattern, ignore_case=False):
+ """Translate glob wildcards (*?) into regex syntax. Escape the rest."""
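+  # Roughly: '*' becomes '.*', '?' becomes '.', letters become [xX] classes
+  # when ignore_case is set, and everything else is re.escape()d.  For
+  # example (illustrative), "fun:_Znw*" yields a regex matching "fun:_Znwj".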
+ regex = ''
+ for char in glob_pattern:
+ if char == '*':
+ regex += '.*'
+ elif char == '?':
+ regex += '.'
+ elif ignore_case and char.isalpha():
+ regex += '[%s%s]' % (char.lower(), char.upper())
+ else:
+ regex += re.escape(char)
+ return ''.join(regex)
+
+
+def StripAndSkipCommentsIterator(lines):
+ """Generator of (line_no, line) pairs that strips comments and whitespace."""
+ for (line_no, line) in enumerate(lines):
+ line = line.strip() # Drop \n
+ if line.startswith('#'):
+ continue # Comments
+    # Don't skip empty lines: they indicate the end of a suppression.
+    # Add one to the line number as well, since most editors use 1-based
+    # numbering while enumerate is 0-based.
+ yield (line_no + 1, line)
+
+
+class Suppression(object):
+ """This class represents a single stack trace suppression.
+
+ Attributes:
+ description: A string representing the error description.
+ type: A string representing the error type, e.g. Memcheck:Leak.
+ stack: The lines comprising the stack trace for the suppression.
+ regex: The actual regex used to match against scraped reports.
+ """
+
+ def __init__(self, description, type, stack, defined_at, regex):
+ """Inits Suppression.
+
+ description, type, stack, regex: same as class attributes
+ defined_at: file:line identifying where the suppression was defined
+ """
+ self.description = description
+ self.type = type
+ self.stack = stack
+ self.defined_at = defined_at
+ self.regex = re.compile(regex, re.MULTILINE)
+
+ def Match(self, suppression_from_report):
+ """Returns bool indicating whether this suppression matches
+    the suppression generated from a Valgrind error report.
+
+    We match our suppressions against generated suppressions
+    (not against reports) since they share the same format,
+    whereas the reports are taken from XML, contain filenames,
+    are demangled, and are generally more difficult to parse.
+
+ Args:
+ suppression_from_report: list of strings (function names).
+ Returns:
+ True if the suppression is not empty and matches the report.
+ """
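+    # Illustrative: with the Valgrind-style syntax, a suppression whose stack
+    # is ["fun:malloc", "..."] matches any generated suppression of the same
+    # type whose first frame is fun:malloc.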
+ if not self.stack:
+ return False
+ lines = [f.strip() for f in suppression_from_report]
+ return self.regex.match('\n'.join(lines) + '\n') is not None
+
+
+def FilenameToTool(filename):
+ """Return the name of the tool that a file is related to, or None.
+
+ Example mappings:
+ tools/valgrind/tsan/suppressions.txt -> tsan
+ tools/valgrind/drmemory/suppressions.txt -> drmemory
+ tools/valgrind/drmemory/suppressions_full.txt -> drmemory
+ tools/valgrind/memcheck/suppressions.txt -> memcheck
+ tools/valgrind/memcheck/suppressions_mac.txt -> memcheck
+ """
+ filename = os.path.abspath(filename)
+ parts = filename.split(os.sep)
+ tool = parts[-2]
+ if tool in ('drmemory', 'memcheck', 'tsan'):
+ return tool
+ return None
+
+
+def ReadSuppressionsFromFile(filename):
+ """Read suppressions from the given file and return them as a list"""
+ tool_to_parser = {
+ "drmemory": ReadDrMemorySuppressions,
+ "memcheck": ReadValgrindStyleSuppressions,
+ "tsan": ReadValgrindStyleSuppressions,
+ }
+ tool = FilenameToTool(filename)
+ assert tool in tool_to_parser, (
+ "unknown tool %s for filename %s" % (tool, filename))
+ parse_func = tool_to_parser[tool]
+
+ # Consider non-existent files to be empty.
+ if not os.path.exists(filename):
+ return []
+
+ input_file = file(filename, 'r')
+ try:
+ return parse_func(input_file, filename)
+ except SuppressionError:
+ input_file.close()
+ raise
+
+
+class ValgrindStyleSuppression(Suppression):
+ """A suppression using the Valgrind syntax.
+
+  Most tools, even ones that are not Valgrind-based (e.g. TSan), use this
+  syntax.
+
+ Attributes:
+ Same as Suppression.
+ """
+
+ def __init__(self, description, type, stack, defined_at):
+    """Creates a suppression using the Memcheck/TSan syntax."""
+ regex = '{\n.*\n%s\n' % type
+ for line in stack:
+ if line == ELLIPSIS:
+ regex += '(.*\n)*'
+ else:
+ regex += GlobToRegex(line)
+ regex += '\n'
+ regex += '(.*\n)*'
+ regex += '}'
+
+ # In the recent version of valgrind-variant we've switched
+ # from memcheck's default Addr[1248]/Value[1248]/Cond suppression types
+ # to simply Unaddressable/Uninitialized.
+ # The suppression generator no longer gives us "old" types thus
+ # for the "new-type" suppressions:
+ # * Memcheck:Unaddressable should also match Addr* reports,
+ # * Memcheck:Uninitialized should also match Cond and Value reports,
+ #
+ # We also want to support legacy suppressions (e.g. copied from
+ # upstream bugs etc), so:
+ # * Memcheck:Addr[1248] suppressions should match Unaddressable reports,
+ # * Memcheck:Cond and Memcheck:Value[1248] should match Uninitialized.
+    # Please note the latter two rules only apply to the
+ # tools/valgrind/waterfall.sh suppression matcher and the real
+ # valgrind-variant Memcheck will not suppress
+ # e.g. Addr1 printed as Unaddressable with Addr4 suppression.
+ # Be careful to check the access size while copying legacy suppressions!
+ for sz in [1, 2, 4, 8]:
+ regex = regex.replace("\nMemcheck:Addr%d\n" % sz,
+ "\nMemcheck:(Addr%d|Unaddressable)\n" % sz)
+ regex = regex.replace("\nMemcheck:Value%d\n" % sz,
+ "\nMemcheck:(Value%d|Uninitialized)\n" % sz)
+ regex = regex.replace("\nMemcheck:Cond\n",
+ "\nMemcheck:(Cond|Uninitialized)\n")
+ regex = regex.replace("\nMemcheck:Unaddressable\n",
+ "\nMemcheck:(Addr.|Unaddressable)\n")
+ regex = regex.replace("\nMemcheck:Uninitialized\n",
+ "\nMemcheck:(Cond|Value.|Uninitialized)\n")
+
+ return super(ValgrindStyleSuppression, self).__init__(
+ description, type, stack, defined_at, regex)
+
+ def __str__(self):
+ """Stringify."""
+ lines = [self.description, self.type] + self.stack
+ return "{\n %s\n}\n" % "\n ".join(lines)
+
+
+class SuppressionError(Exception):
+ def __init__(self, message, happened_at):
+ self._message = message
+ self._happened_at = happened_at
+
+ def __str__(self):
+ return 'Error reading suppressions at %s!\n%s' % (
+ self._happened_at, self._message)
+
+
+def ReadValgrindStyleSuppressions(lines, supp_descriptor):
+ """Given a list of lines, returns a list of suppressions.
+
+ Args:
+ lines: a list of lines containing suppressions.
+ supp_descriptor: should typically be a filename.
+ Used only when printing errors.
+ """
+ result = []
+ cur_descr = ''
+ cur_type = ''
+ cur_stack = []
+ in_suppression = False
+ nline = 0
+ for line in lines:
+ nline += 1
+ line = line.strip()
+ if line.startswith('#'):
+ continue
+ if not in_suppression:
+ if not line:
+ # empty lines between suppressions
+ pass
+ elif line.startswith('{'):
+ in_suppression = True
+ pass
+ else:
+ raise SuppressionError('Expected: "{"',
+ "%s:%d" % (supp_descriptor, nline))
+ elif line.startswith('}'):
+ result.append(
+ ValgrindStyleSuppression(cur_descr, cur_type, cur_stack,
+ "%s:%d" % (supp_descriptor, nline)))
+ cur_descr = ''
+ cur_type = ''
+ cur_stack = []
+ in_suppression = False
+ elif not cur_descr:
+ cur_descr = line
+ continue
+ elif not cur_type:
+ if (not line.startswith("Memcheck:") and
+ not line.startswith("ThreadSanitizer:")):
+ raise SuppressionError(
+ 'Expected "Memcheck:TYPE" or "ThreadSanitizer:TYPE", '
+ 'got "%s"' % line,
+ "%s:%d" % (supp_descriptor, nline))
+ supp_type = line.split(':')[1]
+ if not supp_type in ["Addr1", "Addr2", "Addr4", "Addr8",
+ "Cond", "Free", "Jump", "Leak", "Overlap", "Param",
+ "Value1", "Value2", "Value4", "Value8",
+ "Race", "UnlockNonLocked", "InvalidLock",
+ "Unaddressable", "Uninitialized"]:
+ raise SuppressionError('Unknown suppression type "%s"' % supp_type,
+ "%s:%d" % (supp_descriptor, nline))
+ cur_type = line
+ continue
+ elif re.match("^fun:.*|^obj:.*|^\.\.\.$", line):
+ cur_stack.append(line.strip())
+ elif len(cur_stack) == 0 and cur_type == "Memcheck:Param":
+ cur_stack.append(line.strip())
+ else:
+ raise SuppressionError(
+ '"fun:function_name" or "obj:object_file" or "..." expected',
+ "%s:%d" % (supp_descriptor, nline))
+ return result
+
+
+def PresubmitCheckSuppressions(supps):
+ """Check a list of suppressions and return a list of SuppressionErrors.
+
+ Mostly useful for separating the checking logic from the Presubmit API for
+ testing.
+ """
+ known_supp_names = {} # Key: name, Value: suppression.
+ errors = []
+ for s in supps:
+ if re.search("<.*suppression.name.here>", s.description):
+ # Suppression name line is
+ # <insert_a_suppression_name_here> for Memcheck,
+ # <Put your suppression name here> for TSan,
+ # name=<insert_a_suppression_name_here> for DrMemory
+ errors.append(
+ SuppressionError(
+ "You've forgotten to put a suppression name like bug_XXX",
+ s.defined_at))
+ continue
+
+ if s.description in known_supp_names:
+ errors.append(
+ SuppressionError(
+ 'Suppression named "%s" is defined more than once, '
+ 'see %s' % (s.description,
+ known_supp_names[s.description].defined_at),
+ s.defined_at))
+ else:
+ known_supp_names[s.description] = s
+ return errors
+
+
+def PresubmitCheck(input_api, output_api):
+ """A helper function useful in PRESUBMIT.py
+ Returns a list of errors or [].
+ """
+ sup_regex = re.compile('suppressions.*\.txt$')
+ filenames = [f.AbsoluteLocalPath() for f in input_api.AffectedFiles()
+ if sup_regex.search(f.LocalPath())]
+
+ errors = []
+
+ # TODO(timurrrr): warn on putting suppressions into a wrong file,
+ # e.g. TSan suppression in a memcheck file.
+
+ for f in filenames:
+ try:
+ supps = ReadSuppressionsFromFile(f)
+ errors.extend(PresubmitCheckSuppressions(supps))
+ except SuppressionError as e:
+ errors.append(e)
+
+ return [output_api.PresubmitError(str(e)) for e in errors]
+
+
+class DrMemorySuppression(Suppression):
+ """A suppression using the DrMemory syntax.
+
+ Attributes:
+ instr: The instruction to match.
+ Rest inherited from Suppression.
+ """
+
+ def __init__(self, name, report_type, instr, stack, defined_at):
+ """Constructor."""
+ self.instr = instr
+
+ # Construct the regex.
+ regex = '{\n'
+ if report_type == 'LEAK':
+ regex += '(POSSIBLE )?LEAK'
+ else:
+ regex += report_type
+ regex += '\nname=.*\n'
+
+ # TODO(rnk): Implement http://crbug.com/107416#c5 .
+ # drmemory_analyze.py doesn't generate suppressions with an instruction in
+ # them, so these suppressions will always fail to match. We should override
+ # Match to fetch the instruction from the report and try to match against
+ # that.
+ if instr:
+ regex += 'instruction=%s\n' % GlobToRegex(instr)
+
+ for line in stack:
+ if line == ELLIPSIS:
+ regex += '(.*\n)*'
+ elif '!' in line:
+ (mod, func) = line.split('!')
+ if func == ELLIPSIS: # mod!ellipsis frame
+ regex += '(%s\!.*\n)+' % GlobToRegex(mod, ignore_case=True)
+ else: # mod!func frame
+ # Ignore case for the module match, but not the function match.
+ regex += '%s\!%s\n' % (GlobToRegex(mod, ignore_case=True),
+ GlobToRegex(func, ignore_case=False))
+ else:
+ regex += GlobToRegex(line)
+ regex += '\n'
+ regex += '(.*\n)*' # Match anything left in the stack.
+ regex += '}'
+ return super(DrMemorySuppression, self).__init__(name, report_type, stack,
+ defined_at, regex)
+
+ def __str__(self):
+ """Stringify."""
+ text = self.type + "\n"
+ if self.description:
+ text += "name=%s\n" % self.description
+ if self.instr:
+ text += "instruction=%s\n" % self.instr
+ text += "\n".join(self.stack)
+ text += "\n"
+ return text
+
+
+# Possible DrMemory error report types. Keep consistent with suppress_name
+# array in drmemory/drmemory/report.c.
+DRMEMORY_ERROR_TYPES = [
+ 'UNADDRESSABLE ACCESS',
+ 'UNINITIALIZED READ',
+ 'INVALID HEAP ARGUMENT',
+ 'GDI USAGE ERROR',
+ 'HANDLE LEAK',
+ 'LEAK',
+ 'POSSIBLE LEAK',
+ 'WARNING',
+ ]
+
+
+# Regexes to match valid drmemory frames.
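+# (Illustrative frames: "ntdll.dll!RtlTryEnterCriticalSection",
+# "chrome.dll!...", "<ntdll.dll+0x1234>", "system call NtCreateFile",
+# "*" and "...".)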
+DRMEMORY_FRAME_PATTERNS = [
+ re.compile(r"^.*\!.*$"), # mod!func
+ re.compile(r"^.*!\.\.\.$"), # mod!ellipsis
+ re.compile(r"^\<.*\+0x.*\>$"), # <mod+0xoffs>
+ re.compile(r"^\<not in a module\>$"),
+ re.compile(r"^system call .*$"),
+ re.compile(r"^\*$"), # wildcard
+ re.compile(r"^\.\.\.$"), # ellipsis
+ ]
+
+
+def ReadDrMemorySuppressions(lines, supp_descriptor):
+ """Given a list of lines, returns a list of DrMemory suppressions.
+
+ Args:
+ lines: a list of lines containing suppressions.
+ supp_descriptor: should typically be a filename.
+ Used only when parsing errors happen.
+ """
+ lines = StripAndSkipCommentsIterator(lines)
+ suppressions = []
+ for (line_no, line) in lines:
+ if not line:
+ continue
+ if line not in DRMEMORY_ERROR_TYPES:
+ raise SuppressionError('Expected a DrMemory error type, '
+ 'found %r instead\n Valid error types: %s' %
+ (line, ' '.join(DRMEMORY_ERROR_TYPES)),
+ "%s:%d" % (supp_descriptor, line_no))
+
+ # Suppression starts here.
+ report_type = line
+ name = ''
+ instr = None
+ stack = []
+ defined_at = "%s:%d" % (supp_descriptor, line_no)
+ found_stack = False
+ for (line_no, line) in lines:
+ if not found_stack and line.startswith('name='):
+ name = line.replace('name=', '')
+ elif not found_stack and line.startswith('instruction='):
+ instr = line.replace('instruction=', '')
+ else:
+ # Unrecognized prefix indicates start of stack trace.
+ found_stack = True
+ if not line:
+ # Blank line means end of suppression.
+ break
+ if not any([regex.match(line) for regex in DRMEMORY_FRAME_PATTERNS]):
+ raise SuppressionError(
+ ('Unexpected stack frame pattern at line %d\n' +
+ 'Frames should be one of the following:\n' +
+ ' module!function\n' +
+ ' module!...\n' +
+ ' <module+0xhexoffset>\n' +
+ ' <not in a module>\n' +
+ ' system call Name\n' +
+ ' *\n' +
+ ' ...\n') % line_no, defined_at)
+ stack.append(line)
+
+ if len(stack) == 0: # In case we hit EOF or blank without any stack frames.
+ raise SuppressionError('Suppression "%s" has no stack frames, ends at %d'
+ % (name, line_no), defined_at)
+ if stack[-1] == ELLIPSIS:
+ raise SuppressionError('Suppression "%s" ends in an ellipsis on line %d' %
+ (name, line_no), defined_at)
+
+ suppressions.append(
+ DrMemorySuppression(name, report_type, instr, stack, defined_at))
+
+ return suppressions
+
+
+def ParseSuppressionOfType(lines, supp_descriptor, def_line_no, report_type):
+ """Parse the suppression starting on this line.
+
+ Suppressions start with a type, have an optional name and instruction, and a
+ stack trace that ends in a blank line.
+ """
+
+
+
+def TestStack(stack, positive, negative, suppression_parser=None):
+ """A helper function for SelfTest() that checks a single stack.
+
+ Args:
+ stack: the stack to match the suppressions.
+ positive: the list of suppressions that must match the given stack.
+ negative: the list of suppressions that should not match.
+ suppression_parser: optional arg for the suppression parser, default is
+ ReadValgrindStyleSuppressions.
+ """
+ if not suppression_parser:
+ suppression_parser = ReadValgrindStyleSuppressions
+ for supp in positive:
+ parsed = suppression_parser(supp.split("\n"), "positive_suppression")
+ assert parsed[0].Match(stack.split("\n")), (
+ "Suppression:\n%s\ndidn't match stack:\n%s" % (supp, stack))
+ for supp in negative:
+ parsed = suppression_parser(supp.split("\n"), "negative_suppression")
+ assert not parsed[0].Match(stack.split("\n")), (
+ "Suppression:\n%s\ndid match stack:\n%s" % (supp, stack))
+
+
+def TestFailPresubmit(supp_text, error_text, suppression_parser=None):
+ """A helper function for SelfTest() that verifies a presubmit check fires.
+
+ Args:
+ supp_text: suppression text to parse.
+ error_text: text of the presubmit error we expect to find.
+ suppression_parser: optional arg for the suppression parser, default is
+ ReadValgrindStyleSuppressions.
+ """
+ if not suppression_parser:
+ suppression_parser = ReadValgrindStyleSuppressions
+ try:
+ supps = suppression_parser(supp_text.split("\n"), "<presubmit suppression>")
+ except SuppressionError, e:
+ # If parsing raised an exception, match the error text here.
+ assert error_text in str(e), (
+ "presubmit text %r not in SuppressionError:\n%r" %
+ (error_text, str(e)))
+ else:
+ # Otherwise, run the presubmit checks over the supps. We expect a single
+ # error that has text matching error_text.
+ errors = PresubmitCheckSuppressions(supps)
+ assert len(errors) == 1, (
+ "expected exactly one presubmit error, got:\n%s" % errors)
+ assert error_text in str(errors[0]), (
+ "presubmit text %r not in SuppressionError:\n%r" %
+ (error_text, str(errors[0])))
+
+
+def SelfTest():
+ """Tests the Suppression.Match() capabilities."""
+
+ test_memcheck_stack_1 = """{
+ test
+ Memcheck:Leak
+ fun:absolutly
+ fun:brilliant
+ obj:condition
+ fun:detection
+ fun:expression
+ }"""
+
+ test_memcheck_stack_2 = """{
+ test
+ Memcheck:Uninitialized
+ fun:absolutly
+ fun:brilliant
+ obj:condition
+ fun:detection
+ fun:expression
+ }"""
+
+ test_memcheck_stack_3 = """{
+ test
+ Memcheck:Unaddressable
+ fun:absolutly
+ fun:brilliant
+ obj:condition
+ fun:detection
+ fun:expression
+ }"""
+
+ test_memcheck_stack_4 = """{
+ test
+ Memcheck:Addr4
+ fun:absolutly
+ fun:brilliant
+ obj:condition
+ fun:detection
+ fun:expression
+ }"""
+
+ test_tsan_stack = """{
+ test
+ ThreadSanitizer:Race
+ fun:absolutly
+ fun:brilliant
+ obj:condition
+ fun:detection
+ fun:expression
+ }"""
+
+
+ positive_memcheck_suppressions_1 = [
+ "{\nzzz\nMemcheck:Leak\nfun:absolutly\n}",
+ "{\nzzz\nMemcheck:Leak\nfun:ab*ly\n}",
+ "{\nzzz\nMemcheck:Leak\nfun:absolutly\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Leak\n...\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Leak\n...\nfun:detection\n}",
+ "{\nzzz\nMemcheck:Leak\nfun:absolutly\n...\nfun:detection\n}",
+ "{\nzzz\nMemcheck:Leak\nfun:ab*ly\n...\nfun:detection\n}",
+ "{\nzzz\nMemcheck:Leak\n...\nobj:condition\n}",
+ "{\nzzz\nMemcheck:Leak\n...\nobj:condition\nfun:detection\n}",
+ "{\nzzz\nMemcheck:Leak\n...\nfun:brilliant\nobj:condition\n}",
+ ]
+
+ positive_memcheck_suppressions_2 = [
+ "{\nzzz\nMemcheck:Uninitialized\nfun:absolutly\n}",
+ "{\nzzz\nMemcheck:Uninitialized\nfun:ab*ly\n}",
+ "{\nzzz\nMemcheck:Uninitialized\nfun:absolutly\nfun:brilliant\n}",
+ # Legacy suppression types
+ "{\nzzz\nMemcheck:Value1\n...\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Cond\n...\nfun:detection\n}",
+ "{\nzzz\nMemcheck:Value8\nfun:absolutly\nfun:brilliant\n}",
+ ]
+
+ positive_memcheck_suppressions_3 = [
+ "{\nzzz\nMemcheck:Unaddressable\nfun:absolutly\n}",
+ "{\nzzz\nMemcheck:Unaddressable\nfun:absolutly\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Unaddressable\nfun:absolutly\nfun:brilliant\n}",
+ # Legacy suppression types
+ "{\nzzz\nMemcheck:Addr1\n...\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Addr8\n...\nfun:detection\n}",
+ ]
+
+ positive_memcheck_suppressions_4 = [
+ "{\nzzz\nMemcheck:Addr4\nfun:absolutly\n}",
+ "{\nzzz\nMemcheck:Unaddressable\nfun:absolutly\n}",
+ "{\nzzz\nMemcheck:Addr4\nfun:absolutly\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Unaddressable\n...\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Addr4\n...\nfun:detection\n}",
+ ]
+
+ positive_tsan_suppressions = [
+ "{\nzzz\nThreadSanitizer:Race\n...\nobj:condition\n}",
+ "{\nzzz\nThreadSanitizer:Race\nfun:absolutly\n}",
+ ]
+
+ negative_memcheck_suppressions_1 = [
+ "{\nzzz\nMemcheck:Leak\nfun:abnormal\n}",
+ "{\nzzz\nMemcheck:Leak\nfun:ab*liant\n}",
+ "{\nzzz\nMemcheck:Leak\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Leak\nobj:condition\n}",
+ "{\nzzz\nMemcheck:Addr8\nfun:brilliant\n}",
+ ]
+
+ negative_memcheck_suppressions_2 = [
+ "{\nzzz\nMemcheck:Cond\nfun:abnormal\n}",
+ "{\nzzz\nMemcheck:Value2\nfun:abnormal\n}",
+ "{\nzzz\nMemcheck:Uninitialized\nfun:ab*liant\n}",
+ "{\nzzz\nMemcheck:Value4\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Leak\nobj:condition\n}",
+ "{\nzzz\nMemcheck:Addr8\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Unaddressable\nfun:brilliant\n}",
+ ]
+
+ negative_memcheck_suppressions_3 = [
+ "{\nzzz\nMemcheck:Addr1\nfun:abnormal\n}",
+ "{\nzzz\nMemcheck:Uninitialized\nfun:absolutly\n}",
+ "{\nzzz\nMemcheck:Addr2\nfun:ab*liant\n}",
+ "{\nzzz\nMemcheck:Value4\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Leak\nobj:condition\n}",
+ "{\nzzz\nMemcheck:Addr8\nfun:brilliant\n}",
+ ]
+
+ negative_memcheck_suppressions_4 = [
+ "{\nzzz\nMemcheck:Addr1\nfun:abnormal\n}",
+ "{\nzzz\nMemcheck:Addr4\nfun:abnormal\n}",
+ "{\nzzz\nMemcheck:Unaddressable\nfun:abnormal\n}",
+ "{\nzzz\nMemcheck:Addr1\nfun:absolutly\n}",
+ "{\nzzz\nMemcheck:Addr2\nfun:ab*liant\n}",
+ "{\nzzz\nMemcheck:Value4\nfun:brilliant\n}",
+ "{\nzzz\nMemcheck:Leak\nobj:condition\n}",
+ "{\nzzz\nMemcheck:Addr8\nfun:brilliant\n}",
+ ]
+
+ negative_tsan_suppressions = [
+ "{\nzzz\nThreadSanitizer:Leak\nfun:absolutly\n}",
+ "{\nzzz\nThreadSanitizer:Race\nfun:brilliant\n}",
+ ]
+
+ TestStack(test_memcheck_stack_1,
+ positive_memcheck_suppressions_1,
+ negative_memcheck_suppressions_1)
+ TestStack(test_memcheck_stack_2,
+ positive_memcheck_suppressions_2,
+ negative_memcheck_suppressions_2)
+ TestStack(test_memcheck_stack_3,
+ positive_memcheck_suppressions_3,
+ negative_memcheck_suppressions_3)
+ TestStack(test_memcheck_stack_4,
+ positive_memcheck_suppressions_4,
+ negative_memcheck_suppressions_4)
+ TestStack(test_tsan_stack, positive_tsan_suppressions,
+ negative_tsan_suppressions)
+
+ # TODO(timurrrr): add TestFailPresubmit tests.
+
+ ### DrMemory self tests.
+
+ # http://crbug.com/96010 suppression.
+ stack_96010 = """{
+ UNADDRESSABLE ACCESS
+ name=<insert_a_suppression_name_here>
+ *!TestingProfile::FinishInit
+ *!TestingProfile::TestingProfile
+ *!BrowserAboutHandlerTest_WillHandleBrowserAboutURL_Test::TestBody
+ *!testing::Test::Run
+ }"""
+
+ suppress_96010 = [
+ "UNADDRESSABLE ACCESS\nname=zzz\n...\n*!testing::Test::Run\n",
+ ("UNADDRESSABLE ACCESS\nname=zzz\n...\n" +
+ "*!BrowserAboutHandlerTest_WillHandleBrowserAboutURL_Test::TestBody\n"),
+ "UNADDRESSABLE ACCESS\nname=zzz\n...\n*!BrowserAboutHandlerTest*\n",
+ "UNADDRESSABLE ACCESS\nname=zzz\n*!TestingProfile::FinishInit\n",
+ # No name should be needed
+ "UNADDRESSABLE ACCESS\n*!TestingProfile::FinishInit\n",
+ # Whole trace
+ ("UNADDRESSABLE ACCESS\n" +
+ "*!TestingProfile::FinishInit\n" +
+ "*!TestingProfile::TestingProfile\n" +
+ "*!BrowserAboutHandlerTest_WillHandleBrowserAboutURL_Test::TestBody\n" +
+ "*!testing::Test::Run\n"),
+ ]
+
+ negative_96010 = [
+ # Wrong type
+ "UNINITIALIZED READ\nname=zzz\n*!TestingProfile::FinishInit\n",
+ # No ellipsis
+ "UNADDRESSABLE ACCESS\nname=zzz\n*!BrowserAboutHandlerTest*\n",
+ ]
+
+ TestStack(stack_96010, suppress_96010, negative_96010,
+ suppression_parser=ReadDrMemorySuppressions)
+
+ # Invalid heap arg
+ stack_invalid = """{
+ INVALID HEAP ARGUMENT
+ name=asdf
+ *!foo
+ }"""
+ suppress_invalid = [
+ "INVALID HEAP ARGUMENT\n*!foo\n",
+ ]
+ negative_invalid = [
+ "UNADDRESSABLE ACCESS\n*!foo\n",
+ ]
+
+ TestStack(stack_invalid, suppress_invalid, negative_invalid,
+ suppression_parser=ReadDrMemorySuppressions)
+
+ # Suppress only ntdll
+ stack_in_ntdll = """{
+ UNADDRESSABLE ACCESS
+ name=<insert_a_suppression_name_here>
+ ntdll.dll!RtlTryEnterCriticalSection
+ }"""
+ stack_not_ntdll = """{
+ UNADDRESSABLE ACCESS
+ name=<insert_a_suppression_name_here>
+ notntdll.dll!RtlTryEnterCriticalSection
+ }"""
+
+ suppress_in_ntdll = [
+ "UNADDRESSABLE ACCESS\nntdll.dll!RtlTryEnterCriticalSection\n",
+ ]
+ suppress_in_any = [
+ "UNADDRESSABLE ACCESS\n*!RtlTryEnterCriticalSection\n",
+ ]
+
+ TestStack(stack_in_ntdll, suppress_in_ntdll + suppress_in_any, [],
+ suppression_parser=ReadDrMemorySuppressions)
+ # Make sure we don't wildcard away the "not" part and match ntdll.dll by
+ # accident.
+ TestStack(stack_not_ntdll, suppress_in_any, suppress_in_ntdll,
+ suppression_parser=ReadDrMemorySuppressions)
+
+ # Suppress a POSSIBLE LEAK with LEAK.
+ stack_foo_possible = """{
+ POSSIBLE LEAK
+ name=foo possible
+ *!foo
+ }"""
+ suppress_foo_possible = [ "POSSIBLE LEAK\n*!foo\n" ]
+ suppress_foo_leak = [ "LEAK\n*!foo\n" ]
+ TestStack(stack_foo_possible, suppress_foo_possible + suppress_foo_leak, [],
+ suppression_parser=ReadDrMemorySuppressions)
+
+ # Don't suppress LEAK with POSSIBLE LEAK.
+ stack_foo_leak = """{
+ LEAK
+ name=foo leak
+ *!foo
+ }"""
+ TestStack(stack_foo_leak, suppress_foo_leak, suppress_foo_possible,
+ suppression_parser=ReadDrMemorySuppressions)
+
+ # Test case insensitivity of module names.
+ stack_user32_mixed_case = """{
+ LEAK
+ name=<insert>
+ USER32.dll!foo
+ user32.DLL!bar
+ user32.dll!baz
+ }"""
+ suppress_user32 = [ # Module name case doesn't matter.
+ "LEAK\nuser32.dll!foo\nuser32.dll!bar\nuser32.dll!baz\n",
+ "LEAK\nUSER32.DLL!foo\nUSER32.DLL!bar\nUSER32.DLL!baz\n",
+ ]
+ no_suppress_user32 = [ # Function name case matters.
+ "LEAK\nuser32.dll!FOO\nuser32.dll!BAR\nuser32.dll!BAZ\n",
+ "LEAK\nUSER32.DLL!FOO\nUSER32.DLL!BAR\nUSER32.DLL!BAZ\n",
+ ]
+ TestStack(stack_user32_mixed_case, suppress_user32, no_suppress_user32,
+ suppression_parser=ReadDrMemorySuppressions)
+
+ # Test mod!... frames.
+ stack_kernel32_through_ntdll = """{
+ LEAK
+ name=<insert>
+ kernel32.dll!foo
+ KERNEL32.dll!bar
+ kernel32.DLL!baz
+ ntdll.dll!quux
+ }"""
+ suppress_mod_ellipsis = [
+ "LEAK\nkernel32.dll!...\nntdll.dll!quux\n",
+ "LEAK\nKERNEL32.DLL!...\nntdll.dll!quux\n",
+ ]
+ no_suppress_mod_ellipsis = [
+ # Need one or more matching frames, not zero, unlike regular ellipsis.
+ "LEAK\nuser32.dll!...\nkernel32.dll!...\nntdll.dll!quux\n",
+ ]
+ TestStack(stack_kernel32_through_ntdll, suppress_mod_ellipsis,
+ no_suppress_mod_ellipsis,
+ suppression_parser=ReadDrMemorySuppressions)
+
+ # Test that the presubmit checks work.
+ forgot_to_name = """
+ UNADDRESSABLE ACCESS
+ name=<insert_a_suppression_name_here>
+ ntdll.dll!RtlTryEnterCriticalSection
+ """
+ TestFailPresubmit(forgot_to_name, 'forgotten to put a suppression',
+ suppression_parser=ReadDrMemorySuppressions)
+
+ named_twice = """
+ UNADDRESSABLE ACCESS
+ name=http://crbug.com/1234
+ *!foo
+
+ UNADDRESSABLE ACCESS
+ name=http://crbug.com/1234
+ *!bar
+ """
+ TestFailPresubmit(named_twice, 'defined more than once',
+ suppression_parser=ReadDrMemorySuppressions)
+
+ forgot_stack = """
+ UNADDRESSABLE ACCESS
+ name=http://crbug.com/1234
+ """
+ TestFailPresubmit(forgot_stack, 'has no stack frames',
+ suppression_parser=ReadDrMemorySuppressions)
+
+ ends_in_ellipsis = """
+ UNADDRESSABLE ACCESS
+ name=http://crbug.com/1234
+ ntdll.dll!RtlTryEnterCriticalSection
+ ...
+ """
+ TestFailPresubmit(ends_in_ellipsis, 'ends in an ellipsis',
+ suppression_parser=ReadDrMemorySuppressions)
+
+ bad_stack_frame = """
+ UNADDRESSABLE ACCESS
+ name=http://crbug.com/1234
+ fun:memcheck_style_frame
+ """
+ TestFailPresubmit(bad_stack_frame, 'Unexpected stack frame pattern',
+ suppression_parser=ReadDrMemorySuppressions)
+
+ # Test FilenameToTool.
+ filenames_to_tools = {
+ "tools/valgrind/tsan/suppressions.txt": "tsan",
+ "tools/valgrind/drmemory/suppressions.txt": "drmemory",
+ "tools/valgrind/drmemory/suppressions_full.txt": "drmemory",
+ "tools/valgrind/memcheck/suppressions.txt": "memcheck",
+ "tools/valgrind/memcheck/suppressions_mac.txt": "memcheck",
+ "asdf/tools/valgrind/memcheck/suppressions_mac.txt": "memcheck",
+ "foo/bar/baz/tools/valgrind/memcheck/suppressions_mac.txt": "memcheck",
+ "foo/bar/baz/tools/valgrind/suppressions.txt": None,
+ "tools/valgrind/suppressions.txt": None,
+ }
+ for (filename, expected_tool) in filenames_to_tools.items():
+    filename = filename.replace('/', os.sep)  # Make the path look native.
+ tool = FilenameToTool(filename)
+ assert tool == expected_tool, (
+ "failed to get expected tool for filename %r, expected %s, got %s" %
+ (filename, expected_tool, tool))
+
+ # Test ValgrindStyleSuppression.__str__.
+ supp = ValgrindStyleSuppression("http://crbug.com/1234", "Memcheck:Leak",
+ ["...", "fun:foo"], "supp.txt:1")
+ # Intentional 3-space indent. =/
+ supp_str = ("{\n"
+ " http://crbug.com/1234\n"
+ " Memcheck:Leak\n"
+ " ...\n"
+ " fun:foo\n"
+ "}\n")
+ assert str(supp) == supp_str, (
+ "str(supp) != supp_str:\nleft: %s\nright: %s" % (str(supp), supp_str))
+
+ # Test DrMemorySuppression.__str__.
+ supp = DrMemorySuppression(
+ "http://crbug.com/1234", "LEAK", None, ["...", "*!foo"], "supp.txt:1")
+ supp_str = ("LEAK\n"
+ "name=http://crbug.com/1234\n"
+ "...\n"
+ "*!foo\n")
+ assert str(supp) == supp_str, (
+ "str(supp) != supp_str:\nleft: %s\nright: %s" % (str(supp), supp_str))
+
+ supp = DrMemorySuppression(
+ "http://crbug.com/1234", "UNINITIALIZED READ", "test 0x08(%eax) $0x01",
+ ["ntdll.dll!*", "*!foo"], "supp.txt:1")
+ supp_str = ("UNINITIALIZED READ\n"
+ "name=http://crbug.com/1234\n"
+ "instruction=test 0x08(%eax) $0x01\n"
+ "ntdll.dll!*\n"
+ "*!foo\n")
+ assert str(supp) == supp_str, (
+ "str(supp) != supp_str:\nleft: %s\nright: %s" % (str(supp), supp_str))
+
+
+if __name__ == '__main__':
+ SelfTest()
+ print 'PASS'
diff --git a/tools/valgrind/test_suppressions.py b/tools/valgrind/test_suppressions.py
new file mode 100755
index 0000000..285ad21
--- /dev/null
+++ b/tools/valgrind/test_suppressions.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+from collections import defaultdict
+import json
+import os
+import re
+import subprocess
+import sys
+
+import suppressions
+
+
+def ReadReportsFromFile(filename):
+ """ Returns a list of (report_hash, report) and the URL of the report on the
+ waterfall.
+ """
+ input_file = file(filename, 'r')
+ # reports is a list of (error hash, report) pairs.
+ reports = []
+ in_suppression = False
+ cur_supp = []
+ # This stores the last error hash found while reading the file.
+ last_hash = ""
+ for line in input_file:
+ line = line.strip()
+ line = line.replace("</span><span class=\"stdout\">", "")
+ line = line.replace("</span><span class=\"stderr\">", "")
+    line = line.replace("&lt;", "<")
+    line = line.replace("&gt;", ">")
+ if in_suppression:
+ if line == "}":
+ cur_supp += ["}"]
+ reports += [[last_hash, "\n".join(cur_supp)]]
+ in_suppression = False
+ cur_supp = []
+ last_hash = ""
+ else:
+ cur_supp += [" "*3 + line]
+ elif line == "{":
+ in_suppression = True
+ cur_supp = ["{"]
+ elif line.find("Suppression (error hash=#") == 0:
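+      # e.g. a line "Suppression (error hash=#0123456789ABCDEF#):" yields
+      # last_hash == "0123456789ABCDEF" (the hash value here is illustrative).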
+ last_hash = line[25:41]
+ # The line at the end of the file is assumed to store the URL of the report.
+  return reports, line
+
+def Demangle(names):
+ """ Demangle a list of C++ symbols, return a list of human-readable symbols.
+ """
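+  # Illustrative use (assuming c++filt is installed and on PATH):
+  #   Demangle(["_Znwm"]) == ["operator new(unsigned long)"]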
+ # -n is not the default on Mac.
+ args = ['c++filt', '-n']
+ pipe = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ stdout, _ = pipe.communicate(input='\n'.join(names))
+ demangled = stdout.split("\n")
+ # Each line ends with a newline, so the final entry of the split output
+ # will always be ''.
+  assert len(demangled) == len(names) + 1
+  return demangled[:-1]
+
+def GetSymbolsFromReport(report):
+ """Extract all symbols from a suppression report."""
+ symbols = []
+ prefix = "fun:"
+ prefix_len = len(prefix)
+ for line in report.splitlines():
+ index = line.find(prefix)
+ if index != -1:
+ symbols.append(line[index + prefix_len:])
+ return symbols
+
+def PrintTopSymbols(symbol_reports, top_count):
+ """Print the |top_count| symbols with the most occurrences."""
+ boring_symbols=['malloc', '_Znw*', 'TestBody']
+ sorted_reports = sorted(filter(lambda x:x[0] not in boring_symbols,
+ symbol_reports.iteritems()),
+ key=lambda x:len(x[1]), reverse=True)
+ symbols = symbol_reports.keys()
+ demangled = Demangle(symbols)
+ assert len(demangled) == len(symbols)
+ symboltable = dict(zip(symbols, demangled))
+
+ print "\n"
+ print "Top %d symbols" % top_count
+ for (symbol, suppressions) in sorted_reports[:top_count]:
+ print "%4d occurrences : %s" % (len(suppressions), symboltable[symbol])
+
+def ReadHashExclusions(exclusions):
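+  # The exclusions file is JSON with a top-level "hashes" list, for example
+  # (values hypothetical):
+  #   {"hashes": ["0123456789ABCDEF", "FEDCBA9876543210"]}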
+ input_file = file(exclusions, 'r')
+ contents = json.load(input_file)
+ return contents['hashes']
+
+
+def main(argv):
+ supp = suppressions.GetSuppressions()
+
+ # all_reports is a map {report: list of urls containing this report}
+ all_reports = defaultdict(list)
+ report_hashes = {}
+ symbol_reports = defaultdict(list)
+
+ # Create argument parser.
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--top-symbols', type=int, default=0,
+ help='Print a list of the top <n> symbols')
+ parser.add_argument('--symbol-filter', action='append',
+ help='Filter out all suppressions not containing the specified symbol(s). '
+ 'Matches against the mangled names.')
+ parser.add_argument('--exclude-symbol', action='append',
+ help='Filter out all suppressions containing the specified symbol(s). '
+ 'Matches against the mangled names.')
+ parser.add_argument('--exclude-hashes', action='append',
+ help='Specify a .json file with a list of hashes to exclude.')
+
+ parser.add_argument('reports', metavar='report file', nargs='+',
+ help='List of report files')
+ args = parser.parse_args(argv)
+
+ # exclude_hashes is a list of strings, each string an error hash.
+ exclude_hashes = []
+ if args.exclude_hashes:
+ for excl in args.exclude_hashes:
+ print "reading exclusion", excl
+ exclude_hashes += ReadHashExclusions(excl)
+
+ for f in args.reports:
+ f_reports, url = ReadReportsFromFile(f)
+ for (hash, report) in f_reports:
+ if hash in exclude_hashes:
+ continue
+ all_reports[report] += [url]
+ report_hashes[report] = hash
+
+ reports_count = 0
+ for r in all_reports:
+ cur_supp = supp['common_suppressions']
+ if all([re.search("%20Mac%20|mac_valgrind", url)
+ for url in all_reports[r]]):
+ # Include mac suppressions if the report is only present on Mac
+ cur_supp += supp['mac_suppressions']
+ elif all([re.search("Windows%20", url) for url in all_reports[r]]):
+ # Include win32 suppressions if the report is only present on Windows
+ cur_supp += supp['win_suppressions']
+ elif all([re.search("Linux%20", url) for url in all_reports[r]]):
+ cur_supp += supp['linux_suppressions']
+ if all(["DrMemory" in url for url in all_reports[r]]):
+ cur_supp += supp['drmem_suppressions']
+ if all(["DrMemory%20full" in url for url in all_reports[r]]):
+ cur_supp += supp['drmem_full_suppressions']
+
+ # Test if this report is already suppressed
+ skip = False
+ for s in cur_supp:
+ if s.Match(r.split("\n")):
+ skip = True
+ break
+
+ # Skip reports if none of the symbols are in the report.
+ if args.symbol_filter and all(not s in r for s in args.symbol_filter):
+ skip = True
+ if args.exclude_symbol and any(s in r for s in args.exclude_symbol):
+ skip = True
+
+ if not skip:
+ reports_count += 1
+ print "==================================="
+ print "This report observed at"
+ for url in all_reports[r]:
+ print " %s" % url
+ print "didn't match any suppressions:"
+ print "Suppression (error hash=#%s#):" % (report_hashes[r])
+ print r
+ print "==================================="
+
+ if args.top_symbols > 0:
+ symbols = GetSymbolsFromReport(r)
+ for symbol in symbols:
+ symbol_reports[symbol].append(report_hashes[r])
+
+ if reports_count > 0:
+ print ("%d unique reports don't match any of the suppressions" %
+ reports_count)
+ if args.top_symbols > 0:
+ PrintTopSymbols(symbol_reports, args.top_symbols)
+
+ else:
+ print "Congratulations! All reports are suppressed!"
+ # TODO(timurrrr): also make sure none of the old suppressions
+ # were narrowed too much.
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/tools/valgrind/tsan/README b/tools/valgrind/tsan/README
new file mode 100644
index 0000000..132b5c6
--- /dev/null
+++ b/tools/valgrind/tsan/README
@@ -0,0 +1,5 @@
+Attention: ThreadSanitizer v1 has been retired and files in this dir
+should not be used anymore. Please refer to
+http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+for the instructions on using ThreadSanitizer v2.
+Namely, the suppressions now reside in base/debug/tsan_suppressions.cc
diff --git a/tools/valgrind/tsan_analyze.py b/tools/valgrind/tsan_analyze.py
new file mode 100755
index 0000000..2c744e2
--- /dev/null
+++ b/tools/valgrind/tsan_analyze.py
@@ -0,0 +1,271 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# tsan_analyze.py
+
+''' Given a ThreadSanitizer output file, parses errors and uniques them.'''
+
+import gdb_helper
+
+from collections import defaultdict
+import hashlib
+import logging
+import optparse
+import os
+import re
+import subprocess
+import sys
+import time
+
+import common
+
+# Global symbol table (ugh)
+TheAddressTable = None
+
+class _StackTraceLine(object):
+ def __init__(self, line, address, binary):
+ self.raw_line_ = line
+ self.address = address
+ self.binary = binary
+ def __str__(self):
+ global TheAddressTable
+ file, line = TheAddressTable.GetFileLine(self.binary, self.address)
+ if (file is None) or (line is None):
+ return self.raw_line_
+ else:
+ return self.raw_line_.replace(self.binary, '%s:%s' % (file, line))
+
+class TsanAnalyzer(object):
+ ''' Given a set of ThreadSanitizer output files, parse all the errors out of
+ them, unique them and output the results.'''
+
+ LOAD_LIB_RE = re.compile('--[0-9]+-- ([^(:]*) \((0x[0-9a-f]+)\)')
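+  # Matches library-load lines looking roughly like (illustrative):
+  #   --12345-- /lib/libfoo.so (0x7f0012340000)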
+ TSAN_LINE_RE = re.compile('==[0-9]+==\s*[#0-9]+\s*'
+ '([0-9A-Fa-fx]+):'
+ '(?:[^ ]* )*'
+ '([^ :\n]+)'
+ '')
+ THREAD_CREATION_STR = ("INFO: T.* "
+ "(has been created by T.* at this point|is program's main thread)")
+
+ SANITY_TEST_SUPPRESSION = ("ThreadSanitizer sanity test "
+ "(ToolsSanityTest.DataRace)")
+ TSAN_RACE_DESCRIPTION = "Possible data race"
+ TSAN_WARNING_DESCRIPTION = ("Unlocking a non-locked lock"
+ "|accessing an invalid lock"
+ "|which did not acquire this lock")
+ RACE_VERIFIER_LINE = "Confirmed a race|unexpected race"
+ TSAN_ASSERTION = "Assertion failed: "
+
+ def __init__(self, use_gdb=False):
+ '''Reads in a set of files.'''
+
+ self._use_gdb = use_gdb
+ self._cur_testcase = None
+
+ def ReadLine(self):
+ self.line_ = self.cur_fd_.readline()
+ self.stack_trace_line_ = None
+ if not self._use_gdb:
+ return
+ global TheAddressTable
+ match = TsanAnalyzer.LOAD_LIB_RE.match(self.line_)
+ if match:
+ binary, ip = match.groups()
+ TheAddressTable.AddBinaryAt(binary, ip)
+ return
+ match = TsanAnalyzer.TSAN_LINE_RE.match(self.line_)
+ if match:
+ address, binary_name = match.groups()
+ stack_trace_line = _StackTraceLine(self.line_, address, binary_name)
+ TheAddressTable.Add(stack_trace_line.binary, stack_trace_line.address)
+ self.stack_trace_line_ = stack_trace_line
+
+ def ReadSection(self):
+ """ Example of a section:
+ ==4528== WARNING: Possible data race: {{{
+ ==4528== T20 (L{}):
+ ==4528== #0 MyTest::Foo1
+ ==4528== #1 MyThread::ThreadBody
+ ==4528== Concurrent write happened at this point:
+ ==4528== T19 (L{}):
+ ==4528== #0 MyTest::Foo2
+ ==4528== #1 MyThread::ThreadBody
+ ==4528== }}}
+ ------- suppression -------
+ {
+ <Put your suppression name here>
+ ThreadSanitizer:Race
+ fun:MyTest::Foo1
+ fun:MyThread::ThreadBody
+ }
+ ------- end suppression -------
+ """
+ result = [self.line_]
+ if re.search("{{{", self.line_):
+ while not re.search('}}}', self.line_):
+ self.ReadLine()
+ if self.stack_trace_line_ is None:
+ result.append(self.line_)
+ else:
+ result.append(self.stack_trace_line_)
+ self.ReadLine()
+ if re.match('-+ suppression -+', self.line_):
+ # We need to calculate the suppression hash and prepend a line like
+ # "Suppression (error hash=#0123456789ABCDEF#):" so the buildbot can
+ # extract the suppression snippet.
+ supp = ""
+ while not re.match('-+ end suppression -+', self.line_):
+ self.ReadLine()
+ supp += self.line_
+ self.ReadLine()
+ if self._cur_testcase:
+ result.append("The report came from the `%s` test.\n" % \
+ self._cur_testcase)
+ result.append("Suppression (error hash=#%016X#):\n" % \
+ (int(hashlib.md5(supp).hexdigest()[:16], 16)))
+ result.append(" For more info on using suppressions see "
+ "http://dev.chromium.org/developers/how-tos/using-valgrind/threadsanitizer#TOC-Suppressing-data-races\n")
+ result.append(supp)
+ else:
+ self.ReadLine()
+
+ return result
+
+ def ReadTillTheEnd(self):
+ result = [self.line_]
+ while self.line_:
+ self.ReadLine()
+ result.append(self.line_)
+ return result
+
+ def ParseReportFile(self, filename):
+ '''Parses a report file and returns a list of ThreadSanitizer reports.
+
+ Args:
+ filename: report filename.
+ Returns:
+ list of (list of (str iff self._use_gdb, _StackTraceLine otherwise)).
+ '''
+ ret = []
+ self.cur_fd_ = open(filename, 'r')
+
+ while True:
+ # Read ThreadSanitizer reports.
+ self.ReadLine()
+ if not self.line_:
+ break
+
+ while True:
+ tmp = []
+ while re.search(TsanAnalyzer.RACE_VERIFIER_LINE, self.line_):
+ tmp.append(self.line_)
+ self.ReadLine()
+ while re.search(TsanAnalyzer.THREAD_CREATION_STR, self.line_):
+ tmp.extend(self.ReadSection())
+ if re.search(TsanAnalyzer.TSAN_RACE_DESCRIPTION, self.line_):
+ tmp.extend(self.ReadSection())
+ ret.append(tmp) # includes RaceVerifier and thread creation stacks
+ elif (re.search(TsanAnalyzer.TSAN_WARNING_DESCRIPTION, self.line_) and
+ not common.IsWindows()): # workaround for http://crbug.com/53198
+ tmp.extend(self.ReadSection())
+ ret.append(tmp)
+ else:
+ break
+
+ tmp = []
+ if re.search(TsanAnalyzer.TSAN_ASSERTION, self.line_):
+ tmp.extend(self.ReadTillTheEnd())
+ ret.append(tmp)
+ break
+
+ match = re.search("used_suppression:\s+([0-9]+)\s(.*)", self.line_)
+ if match:
+ count, supp_name = match.groups()
+ count = int(count)
+ self.used_suppressions[supp_name] += count
+ self.cur_fd_.close()
+ return ret
+
+ def GetReports(self, files):
+ '''Extracts reports from a set of files.
+
+ Reads a set of files and returns a list of all discovered
+ ThreadSanitizer race reports. As a side effect, populates
+ self.used_suppressions with appropriate info.
+ '''
+
+ global TheAddressTable
+ if self._use_gdb:
+ TheAddressTable = gdb_helper.AddressTable()
+ else:
+ TheAddressTable = None
+ reports = []
+ self.used_suppressions = defaultdict(int)
+ for file in files:
+ reports.extend(self.ParseReportFile(file))
+ if self._use_gdb:
+ TheAddressTable.ResolveAll()
+ # Make each line of each report a string.
+    reports = map(lambda x: map(str, x), reports)
+ return [''.join(report_lines) for report_lines in reports]
+
+ def Report(self, files, testcase, check_sanity=False):
+ '''Reads in a set of files and prints ThreadSanitizer report.
+
+ Args:
+ files: A list of filenames.
+ check_sanity: if true, search for SANITY_TEST_SUPPRESSIONS
+ '''
+
+ # We set up _cur_testcase class-wide variable to avoid passing it through
+ # about 5 functions.
+ self._cur_testcase = testcase
+ reports = self.GetReports(files)
+ self._cur_testcase = None # just in case, shouldn't be used anymore
+
+ common.PrintUsedSuppressionsList(self.used_suppressions)
+
+
+ retcode = 0
+ if reports:
+ sys.stdout.flush()
+ sys.stderr.flush()
+ logging.info("FAIL! Found %i report(s)" % len(reports))
+ for report in reports:
+ logging.info('\n' + report)
+ sys.stdout.flush()
+ retcode = -1
+
+ # Report tool's insanity even if there were errors.
+ if (check_sanity and
+ TsanAnalyzer.SANITY_TEST_SUPPRESSION not in self.used_suppressions):
+ logging.error("FAIL! Sanity check failed!")
+ retcode = -3
+
+ if retcode != 0:
+ return retcode
+
+ logging.info("PASS: No reports found")
+ return 0
+
+
+def main():
+ '''For testing only. The TsanAnalyzer class should be imported instead.'''
+ parser = optparse.OptionParser("usage: %prog <files to analyze>")
+
+ (options, args) = parser.parse_args()
+ if not args:
+ parser.error("no filename specified")
+ filenames = args
+
+ logging.getLogger().setLevel(logging.INFO)
+ analyzer = TsanAnalyzer(use_gdb=True)
+ return analyzer.Report(filenames, None)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/tools/valgrind/tsan_v2/suppressions.txt b/tools/valgrind/tsan_v2/suppressions.txt
new file mode 100644
index 0000000..80c4398
--- /dev/null
+++ b/tools/valgrind/tsan_v2/suppressions.txt
@@ -0,0 +1,2 @@
+# This file is deprecated; please add new suppressions to
+# build/sanitizers/tsan_suppressions.cc.
diff --git a/tools/valgrind/unused_suppressions.py b/tools/valgrind/unused_suppressions.py
new file mode 100755
index 0000000..0f336f1
--- /dev/null
+++ b/tools/valgrind/unused_suppressions.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import urllib2
+
+import suppressions
+
+
+def main():
+ supp = suppressions.GetSuppressions()
+
+ all_supps = []
+ for supps in supp.values():
+ all_supps += [s.description for s in supps]
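+  # POST the newline-separated suppression names to the service below; its
+  # response is expected to list the suppressions unused on the bots.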
+ sys.stdout.write(urllib2.urlopen(
+ 'http://chromium-build-logs.appspot.com/unused_suppressions',
+ '\n'.join(all_supps)).read())
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/tools/valgrind/valgrind.sh b/tools/valgrind/valgrind.sh
new file mode 100755
index 0000000..52f634a
--- /dev/null
+++ b/tools/valgrind/valgrind.sh
@@ -0,0 +1,114 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a small script for manually launching valgrind, along with passing
+# it the suppression file, and some helpful arguments (automatically attaching
+# the debugger on failures, etc). Run it from your repo root, something like:
+# $ sh ./tools/valgrind/valgrind.sh ./out/Debug/chrome
+#
+# This is mostly intended for running the chrome browser interactively.
+# To run unit tests, you probably want to run chrome_tests.sh instead.
+# That's the script used by the valgrind buildbot.
+
+export THISDIR=`dirname $0`
+
+setup_memcheck() {
+ RUN_COMMAND="valgrind"
+
+ # Prompt to attach gdb when there was an error detected.
+ DEFAULT_TOOL_FLAGS=("--db-command=gdb -nw %f %p" "--db-attach=yes" \
+ # Keep the registers in gdb in sync with the code.
+ "--vex-iropt-register-updates=allregs-at-mem-access" \
+ # Overwrite newly allocated or freed objects
+ # with 0x41 to catch inproper use.
+ "--malloc-fill=41" "--free-fill=41" \
+ # Increase the size of stacks being tracked.
+ "--num-callers=30")
+}
+
+setup_tsan() {
+ RUN_COMMAND="valgrind-tsan.sh"
+ IGNORE_FILE="$THISDIR/tsan/ignores.txt"
+ DEFAULT_TOOL_FLAGS=("--announce-threads" "--pure-happens-before=yes" \
+ "--ignore=$IGNORE_FILE")
+}
+
+setup_unknown() {
+ echo "Unknown tool \"$TOOL_NAME\" specified, the result is not guaranteed"
+ DEFAULT_TOOL_FLAGS=()
+}
+
+set -e
+
+if [ $# -eq 0 ]; then
+  echo "usage: $0 [--tool=TOOL] <command to run> <arguments ...>"
+ exit 1
+fi
+
+TOOL_NAME="memcheck"
+declare -a DEFAULT_TOOL_FLAGS
+
+# Select a tool different from memcheck with --tool=TOOL as a first argument
+TMP_STR=`echo $1 | sed 's/^\-\-tool=//'`
+if [ "$TMP_STR" != "$1" ]; then
+ TOOL_NAME="$TMP_STR"
+ shift
+fi
+
+if echo "$@" | grep "\-\-tool" ; then
+ echo "--tool=TOOL must be the first argument" >&2
+ exit 1
+fi
+
+case $TOOL_NAME in
+ memcheck*) setup_memcheck "$1";;
+ tsan*) setup_tsan;;
+ *) setup_unknown;;
+esac
+
+
+SUPPRESSIONS="$THISDIR/$TOOL_NAME/suppressions.txt"
+
+CHROME_VALGRIND=`sh $THISDIR/locate_valgrind.sh`
+if [ "$CHROME_VALGRIND" = "" ]
+then
+ # locate_valgrind.sh failed
+ exit 1
+fi
+echo "Using valgrind binaries from ${CHROME_VALGRIND}"
+
+set -x
+PATH="${CHROME_VALGRIND}/bin:$PATH"
+# We need to set these variables to override default lib paths hard-coded into
+# Valgrind binary.
+export VALGRIND_LIB="$CHROME_VALGRIND/lib/valgrind"
+export VALGRIND_LIB_INNER="$CHROME_VALGRIND/lib/valgrind"
+
+# G_SLICE=always-malloc: make glib use system malloc
+# NSS_DISABLE_UNLOAD=1: make nss skip dlclosing dynamically loaded modules,
+# which would result in "obj:*" in backtraces.
+# NSS_DISABLE_ARENA_FREE_LIST=1: make nss use system malloc
+# G_DEBUG=fatal_warnings: make GTK abort on any critical or warning assertions.
+# If it crashes on you in the Options menu, you hit bug 19751,
+# comment out the G_DEBUG=fatal_warnings line.
+#
+# GTEST_DEATH_TEST_USE_FORK=1: make gtest death tests valgrind-friendly
+#
+# When everyone has the latest valgrind, we might want to add
+# --show-possibly-lost=no
+# to ignore possible but not definite leaks.
+
+G_SLICE=always-malloc \
+NSS_DISABLE_UNLOAD=1 \
+NSS_DISABLE_ARENA_FREE_LIST=1 \
+G_DEBUG=fatal_warnings \
+GTEST_DEATH_TEST_USE_FORK=1 \
+$RUN_COMMAND \
+ --trace-children=yes \
+ --leak-check=yes \
+ --suppressions="$SUPPRESSIONS" \
+ "${DEFAULT_TOOL_FLAGS[@]}" \
+ "$@"
diff --git a/tools/valgrind/valgrind_test.py b/tools/valgrind/valgrind_test.py
new file mode 100644
index 0000000..f568b5e
--- /dev/null
+++ b/tools/valgrind/valgrind_test.py
@@ -0,0 +1,1196 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs an exe through Valgrind and puts the intermediate files in a
+directory.
+"""
+
+import datetime
+import glob
+import logging
+import optparse
+import os
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+
+import common
+
+import drmemory_analyze
+import memcheck_analyze
+import tsan_analyze
+
+class BaseTool(object):
+ """Abstract class for running Valgrind-, PIN-based and other dynamic
+ error detector tools.
+
+ Always subclass this and implement ToolCommand with framework- and
+ tool-specific stuff.
+ """
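+  # A minimal (hypothetical) subclass would look roughly like:
+  #   class MyTool(BaseTool):
+  #     def ToolName(self): return "mytool"
+  #     def ToolCommand(self): return ["mytool"] + self._args
+  #     def Analyze(self, check_sanity=False): return 0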
+
+ def __init__(self):
+ temp_parent_dir = None
+ self.log_parent_dir = ""
+ if common.IsWindows():
+ # gpu process on Windows Vista+ runs at Low Integrity and can only
+ # write to certain directories (http://crbug.com/119131)
+ #
+ # TODO(bruening): if scripts die in middle and don't clean up temp
+ # dir, we'll accumulate files in profile dir. should remove
+ # really old files automatically.
+ profile = os.getenv("USERPROFILE")
+ if profile:
+ self.log_parent_dir = profile + "\\AppData\\LocalLow\\"
+ if os.path.exists(self.log_parent_dir):
+ self.log_parent_dir = common.NormalizeWindowsPath(self.log_parent_dir)
+ temp_parent_dir = self.log_parent_dir
+ # Generated every time (even when overridden)
+ self.temp_dir = tempfile.mkdtemp(prefix="vg_logs_", dir=temp_parent_dir)
+ self.log_dir = self.temp_dir # overridable by --keep_logs
+ self.option_parser_hooks = []
+ # TODO(glider): we may not need some of the env vars on some of the
+ # platforms.
+ self._env = {
+ "G_SLICE" : "always-malloc",
+ "NSS_DISABLE_UNLOAD" : "1",
+ "NSS_DISABLE_ARENA_FREE_LIST" : "1",
+ "GTEST_DEATH_TEST_USE_FORK": "1",
+ }
+
+ def ToolName(self):
+ raise NotImplementedError, "This method should be implemented " \
+ "in the tool-specific subclass"
+
+ def Analyze(self, check_sanity=False):
+ raise NotImplementedError, "This method should be implemented " \
+ "in the tool-specific subclass"
+
+ def RegisterOptionParserHook(self, hook):
+ # Frameworks and tools can add their own flags to the parser.
+ self.option_parser_hooks.append(hook)
+
+ def CreateOptionParser(self):
+ # Defines Chromium-specific flags.
+ self._parser = optparse.OptionParser("usage: %prog [options] <program to "
+ "test>")
+ self._parser.disable_interspersed_args()
+ self._parser.add_option("-t", "--timeout",
+ dest="timeout", metavar="TIMEOUT", default=10000,
+ help="timeout in seconds for the run (default 10000)")
+ self._parser.add_option("", "--build-dir",
+ help="the location of the compiler output")
+ self._parser.add_option("", "--source-dir",
+                            help="path to top of source tree for this build "
+                                 "(used to normalize source paths in baseline)")
+ self._parser.add_option("", "--gtest_filter", default="",
+ help="which test case to run")
+ self._parser.add_option("", "--gtest_repeat",
+ help="how many times to run each test")
+ self._parser.add_option("", "--gtest_print_time", action="store_true",
+ default=False,
+ help="show how long each test takes")
+ self._parser.add_option("", "--ignore_exit_code", action="store_true",
+ default=False,
+ help="ignore exit code of the test "
+ "(e.g. test failures)")
+ self._parser.add_option("", "--keep_logs", action="store_true",
+ default=False,
+ help="store memory tool logs in the <tool>.logs "
+ "directory instead of /tmp.\nThis can be "
+ "useful for tool developers/maintainers.\n"
+ "Please note that the <tool>.logs directory "
+ "will be clobbered on tool startup.")
+
+ # To add framework- or tool-specific flags, please add a hook using
+ # RegisterOptionParserHook in the corresponding subclass.
+ # See ValgrindTool and ThreadSanitizerBase for examples.
+ for hook in self.option_parser_hooks:
+ hook(self, self._parser)
+
+ def ParseArgv(self, args):
+ self.CreateOptionParser()
+
+ # self._tool_flags will store those tool flags which we don't parse
+ # manually in this script.
+ self._tool_flags = []
+ known_args = []
+
+    # We assume that the first argument not starting with "-" is a program
+    # name and all the following flags should be passed to the program.
+    # TODO(timurrrr): customize optparse instead.
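+    # For example (the tool flag below is hypothetical), given
+    #   --timeout=60 --some-tool-flag ./out/Debug/base_unittests --gtest_repeat=2
+    # "--timeout=60" is recognized by the parser, "--some-tool-flag" goes into
+    # self._tool_flags, and the program name plus everything after it ends up
+    # in self._args.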
+ while len(args) > 0 and args[0][:1] == "-":
+ arg = args[0]
+ if (arg == "--"):
+ break
+ if self._parser.has_option(arg.split("=")[0]):
+ known_args += [arg]
+ else:
+ self._tool_flags += [arg]
+ args = args[1:]
+
+ if len(args) > 0:
+ known_args += args
+
+ self._options, self._args = self._parser.parse_args(known_args)
+
+ self._timeout = int(self._options.timeout)
+ self._source_dir = self._options.source_dir
+ if self._options.keep_logs:
+ # log_parent_dir has trailing slash if non-empty
+ self.log_dir = self.log_parent_dir + "%s.logs" % self.ToolName()
+ if os.path.exists(self.log_dir):
+ shutil.rmtree(self.log_dir)
+ os.mkdir(self.log_dir)
+ logging.info("Logs are in " + self.log_dir)
+
+ self._ignore_exit_code = self._options.ignore_exit_code
+ if self._options.gtest_filter != "":
+ self._args.append("--gtest_filter=%s" % self._options.gtest_filter)
+ if self._options.gtest_repeat:
+ self._args.append("--gtest_repeat=%s" % self._options.gtest_repeat)
+ if self._options.gtest_print_time:
+ self._args.append("--gtest_print_time")
+
+ return True
+
+ def Setup(self, args):
+ return self.ParseArgv(args)
+
+ def ToolCommand(self):
+ raise NotImplementedError, "This method should be implemented " \
+ "in the tool-specific subclass"
+
+ def Cleanup(self):
+ # You may override it in the tool-specific subclass
+ pass
+
+ def Execute(self):
+ """ Execute the app to be tested after successful instrumentation.
+ Full execution command-line provided by subclassers via proc."""
+ logging.info("starting execution...")
+ proc = self.ToolCommand()
+ for var in self._env:
+ common.PutEnvAndLog(var, self._env[var])
+ return common.RunSubprocess(proc, self._timeout)
+
+ def RunTestsAndAnalyze(self, check_sanity):
+ exec_retcode = self.Execute()
+ analyze_retcode = self.Analyze(check_sanity)
+
+ if analyze_retcode:
+ logging.error("Analyze failed.")
+ logging.info("Search the log for '[ERROR]' to see the error reports.")
+ return analyze_retcode
+
+ if exec_retcode:
+ if self._ignore_exit_code:
+ logging.info("Test execution failed, but the exit code is ignored.")
+ else:
+ logging.error("Test execution failed.")
+ return exec_retcode
+ else:
+ logging.info("Test execution completed successfully.")
+
+ if not analyze_retcode:
+ logging.info("Analysis completed successfully.")
+
+ return 0
+
+ def Main(self, args, check_sanity, min_runtime_in_seconds):
+ """Call this to run through the whole process: Setup, Execute, Analyze"""
+ start_time = datetime.datetime.now()
+ retcode = -1
+ if self.Setup(args):
+ retcode = self.RunTestsAndAnalyze(check_sanity)
+ shutil.rmtree(self.temp_dir, ignore_errors=True)
+ self.Cleanup()
+ else:
+ logging.error("Setup failed")
+ end_time = datetime.datetime.now()
+ runtime_in_seconds = (end_time - start_time).seconds
+ hours = runtime_in_seconds / 3600
+ seconds = runtime_in_seconds % 3600
+ minutes = seconds / 60
+ seconds = seconds % 60
+ logging.info("elapsed time: %02d:%02d:%02d" % (hours, minutes, seconds))
+ if (min_runtime_in_seconds > 0 and
+ runtime_in_seconds < min_runtime_in_seconds):
+ logging.error("Layout tests finished too quickly. "
+ "It should have taken at least %d seconds. "
+ "Something went wrong?" % min_runtime_in_seconds)
+ retcode = -1
+ return retcode
+
+ def Run(self, args, module, min_runtime_in_seconds=0):
+ MODULES_TO_SANITY_CHECK = ["base"]
+
+ # TODO(timurrrr): this is a temporary workaround for http://crbug.com/47844
+ if self.ToolName() == "tsan" and common.IsMac():
+ MODULES_TO_SANITY_CHECK = []
+
+ check_sanity = module in MODULES_TO_SANITY_CHECK
+ return self.Main(args, check_sanity, min_runtime_in_seconds)
+
+
+class ValgrindTool(BaseTool):
+ """Abstract class for running Valgrind tools.
+
+ Always subclass this and implement ToolSpecificFlags() and
+ ExtendOptionParser() for tool-specific stuff.
+ """
+ def __init__(self):
+ super(ValgrindTool, self).__init__()
+ self.RegisterOptionParserHook(ValgrindTool.ExtendOptionParser)
+
+ def UseXML(self):
+ # Override if tool prefers nonxml output
+ return True
+
+ def SelfContained(self):
+    # Returns true iff the tool is distributed as a self-contained
+ # .sh script (e.g. ThreadSanitizer)
+ return False
+
+ def ExtendOptionParser(self, parser):
+ parser.add_option("", "--suppressions", default=[],
+ action="append",
+ help="path to a valgrind suppression file")
+ parser.add_option("", "--indirect", action="store_true",
+ default=False,
+ help="set BROWSER_WRAPPER rather than "
+ "running valgrind directly")
+ parser.add_option("", "--indirect_webkit_layout", action="store_true",
+ default=False,
+ help="set --wrapper rather than running Dr. Memory "
+ "directly.")
+ parser.add_option("", "--trace_children", action="store_true",
+ default=False,
+ help="also trace child processes")
+ parser.add_option("", "--num-callers",
+ dest="num_callers", default=30,
+ help="number of callers to show in stack traces")
+ parser.add_option("", "--generate_dsym", action="store_true",
+ default=False,
+ help="Generate .dSYM file on Mac if needed. Slow!")
+
+ def Setup(self, args):
+ if not BaseTool.Setup(self, args):
+ return False
+ if common.IsMac():
+ self.PrepareForTestMac()
+ return True
+
+ def PrepareForTestMac(self):
+ """Runs dsymutil if needed.
+
+ Valgrind for Mac OS X requires that debugging information be in a .dSYM
+ bundle generated by dsymutil. It is not currently able to chase DWARF
+ data into .o files like gdb does, so executables without .dSYM bundles or
+ with the Chromium-specific "fake_dsym" bundles generated by
+ build/mac/strip_save_dsym won't give source file and line number
+ information in valgrind.
+
+ This function will run dsymutil if the .dSYM bundle is missing or if
+ it looks like a fake_dsym. A non-fake dsym that already exists is assumed
+ to be up-to-date.
+ """
+ test_command = self._args[0]
+ dsym_bundle = self._args[0] + '.dSYM'
+ dsym_file = os.path.join(dsym_bundle, 'Contents', 'Resources', 'DWARF',
+ os.path.basename(test_command))
+ dsym_info_plist = os.path.join(dsym_bundle, 'Contents', 'Info.plist')
+
+ needs_dsymutil = True
+ saved_test_command = None
+
+ if os.path.exists(dsym_file) and os.path.exists(dsym_info_plist):
+ # Look for the special fake_dsym tag in dsym_info_plist.
+ dsym_info_plist_contents = open(dsym_info_plist).read()
+
+ if not re.search('^\s*<key>fake_dsym</key>$', dsym_info_plist_contents,
+ re.MULTILINE):
+ # fake_dsym is not set, this is a real .dSYM bundle produced by
+ # dsymutil. dsymutil does not need to be run again.
+ needs_dsymutil = False
+ else:
+ # fake_dsym is set. dsym_file is a copy of the original test_command
+ # before it was stripped. Copy it back to test_command so that
+ # dsymutil has unstripped input to work with. Move the stripped
+ # test_command out of the way, it will be restored when this is
+ # done.
+ saved_test_command = test_command + '.stripped'
+ os.rename(test_command, saved_test_command)
+ shutil.copyfile(dsym_file, test_command)
+ shutil.copymode(saved_test_command, test_command)
+
+ if needs_dsymutil:
+ if self._options.generate_dsym:
+ # Remove the .dSYM bundle if it exists.
+ shutil.rmtree(dsym_bundle, True)
+
+ dsymutil_command = ['dsymutil', test_command]
+
+ # dsymutil is crazy slow. Ideally we'd have a timeout here,
+ # but common.RunSubprocess' timeout is only checked
+ # after each line of output; dsymutil is silent
+ # until the end, and is then killed, which is silly.
+ common.RunSubprocess(dsymutil_command)
+
+ if saved_test_command:
+ os.rename(saved_test_command, test_command)
+ else:
+ logging.info("No real .dSYM for test_command. Line numbers will "
+ "not be shown. Either tell xcode to generate .dSYM "
+ "file, or use --generate_dsym option to this tool.")
+
+ def ToolCommand(self):
+ """Get the valgrind command to run."""
+ # Note that self._args begins with the exe to be run.
+ tool_name = self.ToolName()
+
+ # Construct the valgrind command.
+ if self.SelfContained():
+ proc = ["valgrind-%s.sh" % tool_name]
+ else:
+ if 'CHROME_VALGRIND' in os.environ:
+ path = os.path.join(os.environ['CHROME_VALGRIND'], "bin", "valgrind")
+ else:
+ path = "valgrind"
+ proc = [path, "--tool=%s" % tool_name]
+
+ proc += ["--num-callers=%i" % int(self._options.num_callers)]
+
+ if self._options.trace_children:
+ proc += ["--trace-children=yes"]
+ proc += ["--trace-children-skip='*dbus-daemon*'"]
+ proc += ["--trace-children-skip='*dbus-launch*'"]
+ proc += ["--trace-children-skip='*perl*'"]
+ proc += ["--trace-children-skip='*python*'"]
+ # This is really Python, but for some reason Valgrind follows it.
+ proc += ["--trace-children-skip='*lsb_release*'"]
+
+ proc += self.ToolSpecificFlags()
+ proc += self._tool_flags
+
+ suppression_count = 0
+ for suppression_file in self._options.suppressions:
+ if os.path.exists(suppression_file):
+ suppression_count += 1
+ proc += ["--suppressions=%s" % suppression_file]
+
+ if not suppression_count:
+ logging.warning("WARNING: NOT USING SUPPRESSIONS!")
+
+ logfilename = self.log_dir + ("/%s." % tool_name) + "%p"
+ if self.UseXML():
+ proc += ["--xml=yes", "--xml-file=" + logfilename]
+ else:
+ proc += ["--log-file=" + logfilename]
+
+ # The Valgrind command is constructed.
+
+ # Valgrind doesn't play nice with the Chrome sandbox. Empty this env var
+ # set by runtest.py to disable the sandbox.
+ if os.environ.get("CHROME_DEVEL_SANDBOX", None):
+ logging.info("Removing CHROME_DEVEL_SANDBOX from environment")
+ os.environ["CHROME_DEVEL_SANDBOX"] = ''
+
+ # Handle --indirect_webkit_layout separately.
+ if self._options.indirect_webkit_layout:
+ # Need to create the wrapper before modifying |proc|.
+ wrapper = self.CreateBrowserWrapper(proc, webkit=True)
+ proc = self._args
+ proc.append("--wrapper")
+ proc.append(wrapper)
+ return proc
+
+ if self._options.indirect:
+ wrapper = self.CreateBrowserWrapper(proc)
+ os.environ["BROWSER_WRAPPER"] = wrapper
+ logging.info('export BROWSER_WRAPPER=' + wrapper)
+ proc = []
+ proc += self._args
+ return proc
+
+ def ToolSpecificFlags(self):
+ raise NotImplementedError, "This method should be implemented " \
+ "in the tool-specific subclass"
+
+ def CreateBrowserWrapper(self, proc, webkit=False):
+ """The program being run invokes Python or something else that can't stand
+ to be valgrinded, and also invokes the Chrome browser. In this case, use a
+ magic wrapper to only valgrind the Chrome browser. Build the wrapper here.
+ Returns the path to the wrapper. It's up to the caller to use the wrapper
+ appropriately.
+ """
+ command = " ".join(proc)
+ # Add the PID of the browser wrapper to the logfile names so we can
+ # separate log files for different UI tests at the analyze stage.
+ command = command.replace("%p", "$$.%p")
+
+ (fd, indirect_fname) = tempfile.mkstemp(dir=self.log_dir,
+ prefix="browser_wrapper.",
+ text=True)
+ f = os.fdopen(fd, "w")
+ f.write('#!/bin/bash\n'
+ 'echo "Started Valgrind wrapper for this test, PID=$$" >&2\n')
+
+ f.write('DIR=`dirname $0`\n'
+ 'TESTNAME_FILE=$DIR/testcase.$$.name\n\n')
+
+ if webkit:
+ # Webkit layout_tests pass the URL as the first line of stdin.
+ f.write('tee $TESTNAME_FILE | %s "$@"\n' % command)
+ else:
+ # Try to get the test case name by looking at the program arguments.
+ # i.e. Chromium ui_tests used --test-name arg.
+ # TODO(timurrrr): This doesn't handle "--test-name Test.Name"
+ # TODO(timurrrr): ui_tests are dead. Where do we use the non-webkit
+ # wrapper now? browser_tests? What do they do?
+ f.write('for arg in $@\ndo\n'
+ ' if [[ "$arg" =~ --test-name=(.*) ]]\n then\n'
+ ' echo ${BASH_REMATCH[1]} >$TESTNAME_FILE\n'
+ ' fi\n'
+ 'done\n\n'
+ '%s "$@"\n' % command)
+
+ f.close()
+ os.chmod(indirect_fname, stat.S_IRUSR|stat.S_IXUSR)
+ return indirect_fname
+
+ def CreateAnalyzer(self):
+ raise NotImplementedError, "This method should be implemented " \
+ "in the tool-specific subclass"
+
+ def GetAnalyzeResults(self, check_sanity=False):
+ # Glob all the files in the log directory
+ filenames = glob.glob(self.log_dir + "/" + self.ToolName() + ".*")
+
+ # If we have browser wrapper, the logfiles are named as
+ # "toolname.wrapper_PID.valgrind_PID".
+ # Let's extract the list of wrapper_PIDs and name it ppids
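+    # (Illustrative: a file named "memcheck.12345.67890" yields wrapper PID
+    # 12345.)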
+ ppids = set([int(f.split(".")[-2]) \
+ for f in filenames if re.search("\.[0-9]+\.[0-9]+$", f)])
+
+ analyzer = self.CreateAnalyzer()
+ if len(ppids) == 0:
+ # Fast path - no browser wrapper was set.
+ return analyzer.Report(filenames, None, check_sanity)
+
+ ret = 0
+ for ppid in ppids:
+ testcase_name = None
+ try:
+ f = open(self.log_dir + ("/testcase.%d.name" % ppid))
+ testcase_name = f.read().strip()
+ f.close()
+ wk_layout_prefix="third_party/WebKit/LayoutTests/"
+ wk_prefix_at = testcase_name.rfind(wk_layout_prefix)
+ if wk_prefix_at != -1:
+ testcase_name = testcase_name[wk_prefix_at + len(wk_layout_prefix):]
+ except IOError:
+ pass
+ print "====================================================="
+ print " Below is the report for valgrind wrapper PID=%d." % ppid
+ if testcase_name:
+ print " It was used while running the `%s` test." % testcase_name
+ else:
+ print " You can find the corresponding test"
+ print " by searching the above log for 'PID=%d'" % ppid
+ sys.stdout.flush()
+
+ ppid_filenames = [f for f in filenames \
+ if re.search("\.%d\.[0-9]+$" % ppid, f)]
+ # check_sanity won't work with browser wrappers
+ assert check_sanity == False
+ ret |= analyzer.Report(ppid_filenames, testcase_name)
+ print "====================================================="
+ sys.stdout.flush()
+
+ if ret != 0:
+ print ""
+ print "The Valgrind reports are grouped by test names."
+ print "Each test has its PID printed in the log when the test was run"
+ print "and at the beginning of its Valgrind report."
+ print "Hint: you can search for the reports by Ctrl+F -> `=#`"
+ sys.stdout.flush()
+
+ return ret
+
+
+# TODO(timurrrr): Split into a separate file.
+class Memcheck(ValgrindTool):
+ """Memcheck
+ Dynamic memory error detector for Linux & Mac
+
+ http://valgrind.org/info/tools.html#memcheck
+ """
+
+ def __init__(self):
+ super(Memcheck, self).__init__()
+ self.RegisterOptionParserHook(Memcheck.ExtendOptionParser)
+
+ def ToolName(self):
+ return "memcheck"
+
+ def ExtendOptionParser(self, parser):
+ parser.add_option("--leak-check", "--leak_check", type="string",
+ default="yes", # --leak-check=yes is equivalent of =full
+ help="perform leak checking at the end of the run")
+ parser.add_option("", "--show_all_leaks", action="store_true",
+ default=False,
+ help="also show less blatant leaks")
+ parser.add_option("", "--track_origins", action="store_true",
+ default=False,
+ help="Show whence uninitialized bytes came. 30% slower.")
+
+ def ToolSpecificFlags(self):
+ ret = ["--gen-suppressions=all", "--demangle=no"]
+ ret += ["--leak-check=%s" % self._options.leak_check]
+
+ if self._options.show_all_leaks:
+ ret += ["--show-reachable=yes"]
+ else:
+ ret += ["--show-possibly-lost=no"]
+
+ if self._options.track_origins:
+ ret += ["--track-origins=yes"]
+
+ # TODO(glider): this is a temporary workaround for http://crbug.com/51716
+ # Let's see whether it helps.
+ if common.IsMac():
+ ret += ["--smc-check=all"]
+
+ return ret
+
+ def CreateAnalyzer(self):
+ use_gdb = common.IsMac()
+ return memcheck_analyze.MemcheckAnalyzer(self._source_dir,
+ self._options.show_all_leaks,
+ use_gdb=use_gdb)
+
+ def Analyze(self, check_sanity=False):
+ ret = self.GetAnalyzeResults(check_sanity)
+
+ if ret != 0:
+ logging.info("Please see http://dev.chromium.org/developers/how-tos/"
+ "using-valgrind for the info on Memcheck/Valgrind")
+ return ret
+
+
+class PinTool(BaseTool):
+ """Abstract class for running PIN tools.
+
+ Always subclass this and implement ToolSpecificFlags() and
+ ExtendOptionParser() for tool-specific stuff.
+ """
+ def PrepareForTest(self):
+ pass
+
+ def ToolSpecificFlags(self):
+ raise NotImplementedError, "This method should be implemented " \
+ "in the tool-specific subclass"
+
+ def ToolCommand(self):
+ """Get the PIN command to run."""
+
+ # Construct the PIN command.
+ pin_cmd = os.getenv("PIN_COMMAND")
+ if not pin_cmd:
+ raise RuntimeError, "Please set PIN_COMMAND environment variable " \
+ "with the path to pin.exe"
+ proc = pin_cmd.split(" ")
+
+ proc += self.ToolSpecificFlags()
+
+ # The PIN command is constructed.
+
+ # PIN requires -- to separate PIN flags from the executable name.
+ # self._args begins with the exe to be run.
+ proc += ["--"]
+
+ proc += self._args
+ return proc
+
+
+class ThreadSanitizerBase(object):
+ """ThreadSanitizer
+ Dynamic data race detector for Linux, Mac and Windows.
+
+ http://code.google.com/p/data-race-test/wiki/ThreadSanitizer
+
+ Since TSan works on both Valgrind (Linux, Mac) and PIN (Windows), we need
+  to have multiple inheritance.
+ """
+
+ INFO_MESSAGE="Please see http://dev.chromium.org/developers/how-tos/" \
+ "using-valgrind/threadsanitizer for the info on " \
+ "ThreadSanitizer"
+
+ def __init__(self):
+ super(ThreadSanitizerBase, self).__init__()
+ self.RegisterOptionParserHook(ThreadSanitizerBase.ExtendOptionParser)
+
+ def ToolName(self):
+ return "tsan"
+
+ def UseXML(self):
+ return False
+
+ def SelfContained(self):
+ return True
+
+ def ExtendOptionParser(self, parser):
+ parser.add_option("", "--hybrid", default="no",
+ dest="hybrid",
+ help="Finds more data races, may give false positive "
+ "reports unless the code is annotated")
+ parser.add_option("", "--announce-threads", default="yes",
+ dest="announce_threads",
+                      help="Show the stack traces of thread creation")
+ parser.add_option("", "--free-is-write", default="no",
+ dest="free_is_write",
+ help="Treat free()/operator delete as memory write. "
+ "This helps finding more data races, but (currently) "
+ "this may give false positive reports on std::string "
+ "internals, see http://code.google.com/p/data-race-test"
+ "/issues/detail?id=40")
+
+ def EvalBoolFlag(self, flag_value):
+ if (flag_value in ["1", "true", "yes"]):
+ return True
+ elif (flag_value in ["0", "false", "no"]):
+ return False
+ raise RuntimeError, "Can't parse flag value (%s)" % flag_value
+
+ def ToolSpecificFlags(self):
+ ret = []
+
+ ignore_files = ["ignores.txt"]
+ for platform_suffix in common.PlatformNames():
+ ignore_files.append("ignores_%s.txt" % platform_suffix)
+ for ignore_file in ignore_files:
+ fullname = os.path.join(self._source_dir,
+ "tools", "valgrind", "tsan", ignore_file)
+ if os.path.exists(fullname):
+ fullname = common.NormalizeWindowsPath(fullname)
+ ret += ["--ignore=%s" % fullname]
+
+ # This should shorten filepaths for local builds.
+ ret += ["--file-prefix-to-cut=%s/" % self._source_dir]
+
+ # This should shorten filepaths on bots.
+ ret += ["--file-prefix-to-cut=build/src/"]
+ ret += ["--file-prefix-to-cut=out/Release/../../"]
+
+ # This should shorten filepaths for functions intercepted in TSan.
+ ret += ["--file-prefix-to-cut=scripts/tsan/tsan/"]
+ ret += ["--file-prefix-to-cut=src/tsan/tsan/"]
+
+ ret += ["--gen-suppressions=true"]
+
+ if self.EvalBoolFlag(self._options.hybrid):
+ ret += ["--hybrid=yes"] # "no" is the default value for TSAN
+
+ if self.EvalBoolFlag(self._options.announce_threads):
+ ret += ["--announce-threads"]
+
+ if self.EvalBoolFlag(self._options.free_is_write):
+ ret += ["--free-is-write=yes"]
+ else:
+ ret += ["--free-is-write=no"]
+
+
+ # --show-pc flag is needed for parsing the error logs on Darwin.
+ if platform_suffix == 'mac':
+ ret += ["--show-pc=yes"]
+ ret += ["--show-pid=no"]
+
+ boring_callers = common.BoringCallers(mangled=False, use_re_wildcards=False)
+ # TODO(timurrrr): In fact, we want "starting from .." instead of "below .."
+ for bc in boring_callers:
+ ret += ["--cut_stack_below=%s" % bc]
+
+ return ret
+
+
+class ThreadSanitizerPosix(ThreadSanitizerBase, ValgrindTool):
+ def ToolSpecificFlags(self):
+ proc = ThreadSanitizerBase.ToolSpecificFlags(self)
+ # The -v flag is needed for printing the list of used suppressions and
+ # obtaining addresses for loaded shared libraries on Mac.
+ proc += ["-v"]
+ return proc
+
+ def CreateAnalyzer(self):
+ use_gdb = common.IsMac()
+ return tsan_analyze.TsanAnalyzer(use_gdb)
+
+ def Analyze(self, check_sanity=False):
+ ret = self.GetAnalyzeResults(check_sanity)
+
+ if ret != 0:
+ logging.info(self.INFO_MESSAGE)
+ return ret
+
+
+class ThreadSanitizerWindows(ThreadSanitizerBase, PinTool):
+
+ def __init__(self):
+ super(ThreadSanitizerWindows, self).__init__()
+ self.RegisterOptionParserHook(ThreadSanitizerWindows.ExtendOptionParser)
+
+ def ExtendOptionParser(self, parser):
+ parser.add_option("", "--suppressions", default=[],
+ action="append",
+ help="path to TSan suppression file")
+
+
+ def ToolSpecificFlags(self):
+ add_env = {
+ "CHROME_ALLOCATOR" : "WINHEAP",
+ }
+ for k,v in add_env.iteritems():
+ logging.info("export %s=%s", k, v)
+ os.putenv(k, v)
+
+ proc = ThreadSanitizerBase.ToolSpecificFlags(self)
+ # On PIN, ThreadSanitizer has its own suppression mechanism
+    # and --log-file flag which work exactly as they do on Valgrind.
+ suppression_count = 0
+ for suppression_file in self._options.suppressions:
+ if os.path.exists(suppression_file):
+ suppression_count += 1
+ suppression_file = common.NormalizeWindowsPath(suppression_file)
+ proc += ["--suppressions=%s" % suppression_file]
+
+ if not suppression_count:
+ logging.warning("WARNING: NOT USING SUPPRESSIONS!")
+
+ logfilename = self.log_dir + "/tsan.%p"
+ proc += ["--log-file=" + common.NormalizeWindowsPath(logfilename)]
+
+ # TODO(timurrrr): Add flags for Valgrind trace children analog when we
+ # start running complex tests (e.g. UI) under TSan/Win.
+
+ return proc
+
+ def Analyze(self, check_sanity=False):
+ filenames = glob.glob(self.log_dir + "/tsan.*")
+ analyzer = tsan_analyze.TsanAnalyzer()
+ ret = analyzer.Report(filenames, None, check_sanity)
+ if ret != 0:
+ logging.info(self.INFO_MESSAGE)
+ return ret
+
+
+class DrMemory(BaseTool):
+ """Dr.Memory
+ Dynamic memory error detector for Windows.
+
+ http://dev.chromium.org/developers/how-tos/using-drmemory
+ It is not very mature at the moment, some things might not work properly.
+ """
+
+ def __init__(self, full_mode, pattern_mode):
+ super(DrMemory, self).__init__()
+ self.full_mode = full_mode
+ self.pattern_mode = pattern_mode
+ self.RegisterOptionParserHook(DrMemory.ExtendOptionParser)
+
+ def ToolName(self):
+ return "drmemory"
+
+ def ExtendOptionParser(self, parser):
+ parser.add_option("", "--suppressions", default=[],
+ action="append",
+ help="path to a drmemory suppression file")
+ parser.add_option("", "--follow_python", action="store_true",
+ default=False, dest="follow_python",
+ help="Monitor python child processes. If off, neither "
+ "python children nor any children of python children "
+ "will be monitored.")
+ parser.add_option("", "--indirect", action="store_true",
+ default=False,
+ help="set BROWSER_WRAPPER rather than "
+ "running Dr. Memory directly on the harness")
+ parser.add_option("", "--indirect_webkit_layout", action="store_true",
+ default=False,
+ help="set --wrapper rather than running valgrind "
+ "directly.")
+ parser.add_option("", "--use_debug", action="store_true",
+ default=False, dest="use_debug",
+ help="Run Dr. Memory debug build")
+ parser.add_option("", "--trace_children", action="store_true",
+ default=True,
+ help="TODO: default value differs from Valgrind")
+
+ def ToolCommand(self):
+ """Get the tool command to run."""
+ # WINHEAP is what Dr. Memory supports as there are issues w/ both
+ # jemalloc (http://code.google.com/p/drmemory/issues/detail?id=320) and
+ # tcmalloc (http://code.google.com/p/drmemory/issues/detail?id=314)
+ add_env = {
+ "CHROME_ALLOCATOR" : "WINHEAP",
+ "JSIMD_FORCEMMX" : "1", # http://code.google.com/p/drmemory/issues/detail?id=540
+ }
+ for k,v in add_env.iteritems():
+ logging.info("export %s=%s", k, v)
+ os.putenv(k, v)
+
+ drmem_cmd = os.getenv("DRMEMORY_COMMAND")
+ if not drmem_cmd:
+ raise RuntimeError, "Please set DRMEMORY_COMMAND environment variable " \
+ "with the path to drmemory.exe"
+ proc = drmem_cmd.split(" ")
+
+ # By default, don't run python (this will exclude python's children as well)
+ # to reduce runtime. We're not really interested in spending time finding
+ # bugs in the python implementation.
+ # With file-based config we must update the file every time, and
+ # it will affect simultaneous drmem uses by this user. While file-based
+ # config has many advantages, here we may want this-instance-only
+ # (http://code.google.com/p/drmemory/issues/detail?id=334).
+ drconfig_cmd = [ proc[0].replace("drmemory.exe", "drconfig.exe") ]
+ drconfig_cmd += ["-quiet"] # suppress errors about no 64-bit libs
+ run_drconfig = True
+ if self._options.follow_python:
+ logging.info("Following python children")
+ # -unreg fails if not already registered so query for that first
+ query_cmd = drconfig_cmd + ["-isreg", "python.exe"]
+ query_proc = subprocess.Popen(query_cmd, stdout=subprocess.PIPE,
+ shell=True)
+ (query_out, query_err) = query_proc.communicate()
+ if re.search("exe not registered", query_out):
+ run_drconfig = False # all set
+ else:
+ drconfig_cmd += ["-unreg", "python.exe"]
+ else:
+ logging.info("Excluding python children")
+ drconfig_cmd += ["-reg", "python.exe", "-norun"]
+ if run_drconfig:
+ drconfig_retcode = common.RunSubprocess(drconfig_cmd, self._timeout)
+ if drconfig_retcode:
+ logging.error("Configuring whether to follow python children failed " \
+ "with %d.", drconfig_retcode)
+ raise RuntimeError, "Configuring python children failed "
+
+ suppression_count = 0
+ supp_files = self._options.suppressions
+ if self.full_mode:
+ supp_files += [s.replace(".txt", "_full.txt") for s in supp_files]
+ for suppression_file in supp_files:
+ if os.path.exists(suppression_file):
+ suppression_count += 1
+ proc += ["-suppress", common.NormalizeWindowsPath(suppression_file)]
+
+ if not suppression_count:
+ logging.warning("WARNING: NOT USING SUPPRESSIONS!")
+
+ # Un-comment to dump Dr.Memory events on error
+ #proc += ["-dr_ops", "-dumpcore_mask", "-dr_ops", "0x8bff"]
+
+ # Un-comment and comment next line to debug Dr.Memory
+ #proc += ["-dr_ops", "-no_hide"]
+ #proc += ["-dr_ops", "-msgbox_mask", "-dr_ops", "15"]
+ #Proc += ["-dr_ops", "-stderr_mask", "-dr_ops", "15"]
+ # Ensure we see messages about Dr. Memory crashing!
+ proc += ["-dr_ops", "-stderr_mask", "-dr_ops", "12"]
+
+ if self._options.use_debug:
+ proc += ["-debug"]
+
+ proc += ["-logdir", common.NormalizeWindowsPath(self.log_dir)]
+
+    symcache_dir = None
+    if self.log_parent_dir:
+ # gpu process on Windows Vista+ runs at Low Integrity and can only
+ # write to certain directories (http://crbug.com/119131)
+ symcache_dir = os.path.join(self.log_parent_dir, "drmemory.symcache")
+ elif self._options.build_dir:
+ # The other case is only possible with -t cmdline.
+ # Anyways, if we omit -symcache_dir the -logdir's value is used which
+ # should be fine.
+ symcache_dir = os.path.join(self._options.build_dir, "drmemory.symcache")
+ if symcache_dir:
+ if not os.path.exists(symcache_dir):
+ try:
+ os.mkdir(symcache_dir)
+ except OSError:
+ logging.warning("Can't create symcache dir?")
+ if os.path.exists(symcache_dir):
+ proc += ["-symcache_dir", common.NormalizeWindowsPath(symcache_dir)]
+
+ # Use -no_summary to suppress DrMemory's summary and init-time
+ # notifications. We generate our own with drmemory_analyze.py.
+ proc += ["-batch", "-no_summary"]
+
+ # Un-comment to disable interleaved output. Will also suppress error
+ # messages normally printed to stderr.
+ #proc += ["-quiet", "-no_results_to_stderr"]
+
+ proc += ["-callstack_max_frames", "40"]
+
+ # disable leak scan for now
+ proc += ["-no_count_leaks", "-no_leak_scan"]
+
+ # crbug.com/413215, no heap mismatch check for Windows release build binary
+ if common.IsWindows() and "Release" in self._options.build_dir:
+ proc += ["-no_check_delete_mismatch"]
+
+ # make callstacks easier to read
+ proc += ["-callstack_srcfile_prefix",
+ "build\\src,chromium\\src,crt_build\\self_x86"]
+ proc += ["-callstack_modname_hide",
+ "*drmemory*,chrome.dll"]
+
+ boring_callers = common.BoringCallers(mangled=False, use_re_wildcards=False)
+ # TODO(timurrrr): In fact, we want "starting from .." instead of "below .."
+ proc += ["-callstack_truncate_below", ",".join(boring_callers)]
+
+ if self.pattern_mode:
+ proc += ["-pattern", "0xf1fd", "-no_count_leaks", "-redzone_size", "0x20"]
+ elif not self.full_mode:
+ proc += ["-light"]
+
+ proc += self._tool_flags
+
+ # Dr.Memory requires -- to separate tool flags from the executable name.
+ proc += ["--"]
+
+ if self._options.indirect or self._options.indirect_webkit_layout:
+ # TODO(timurrrr): reuse for TSan on Windows
+ wrapper_path = os.path.join(self._source_dir,
+ "tools", "valgrind", "browser_wrapper_win.py")
+ wrapper = " ".join(["python", wrapper_path] + proc)
+ self.CreateBrowserWrapper(wrapper)
+ logging.info("browser wrapper = " + " ".join(proc))
+ if self._options.indirect_webkit_layout:
+ proc = self._args
+ # Layout tests want forward slashes.
+ wrapper = wrapper.replace('\\', '/')
+ proc += ["--wrapper", wrapper]
+ return proc
+ else:
+ proc = []
+
+ # Note that self._args begins with the name of the exe to be run.
+ self._args[0] = common.NormalizeWindowsPath(self._args[0])
+ proc += self._args
+ return proc
+
+ def CreateBrowserWrapper(self, command):
+ os.putenv("BROWSER_WRAPPER", command)
+
+ def Analyze(self, check_sanity=False):
+ # Use one analyzer for all the log files to avoid printing duplicate reports
+ #
+ # TODO(timurrrr): unify this with Valgrind and other tools when we have
+ # http://code.google.com/p/drmemory/issues/detail?id=684
+ analyzer = drmemory_analyze.DrMemoryAnalyzer()
+
+ ret = 0
+ if not self._options.indirect and not self._options.indirect_webkit_layout:
+ filenames = glob.glob(self.log_dir + "/*/results.txt")
+
+ ret = analyzer.Report(filenames, None, check_sanity)
+ else:
+ testcases = glob.glob(self.log_dir + "/testcase.*.logs")
+ # If we have browser wrapper, the per-test logdirs are named as
+ # "testcase.wrapper_PID.name".
+ # Let's extract the list of wrapper_PIDs and name it ppids.
+ # NOTE: ppids may contain '_', i.e. they are not ints!
+ ppids = set([f.split(".")[-2] for f in testcases])
+
+ for ppid in ppids:
+ testcase_name = None
+ try:
+ f = open("%s/testcase.%s.name" % (self.log_dir, ppid))
+ testcase_name = f.read().strip()
+ f.close()
+ except IOError:
+ pass
+ print "====================================================="
+ print " Below is the report for drmemory wrapper PID=%s." % ppid
+ if testcase_name:
+ print " It was used while running the `%s` test." % testcase_name
+ else:
+ # TODO(timurrrr): hm, the PID line is suppressed on Windows...
+ print " You can find the corresponding test"
+ print " by searching the above log for 'PID=%s'" % ppid
+ sys.stdout.flush()
+ ppid_filenames = glob.glob("%s/testcase.%s.logs/*/results.txt" %
+ (self.log_dir, ppid))
+ ret |= analyzer.Report(ppid_filenames, testcase_name, False)
+ print "====================================================="
+ sys.stdout.flush()
+
+ logging.info("Please see http://dev.chromium.org/developers/how-tos/"
+ "using-drmemory for the info on Dr. Memory")
+ return ret
+
+
+# RaceVerifier support. See
+# http://code.google.com/p/data-race-test/wiki/RaceVerifier for more details.
+class ThreadSanitizerRV1Analyzer(tsan_analyze.TsanAnalyzer):
+ """ TsanAnalyzer that saves race reports to a file. """
+
+ TMP_FILE = "rvlog.tmp"
+
+ def __init__(self, source_dir, use_gdb):
+ super(ThreadSanitizerRV1Analyzer, self).__init__(use_gdb)
+ self.out = open(self.TMP_FILE, "w")
+
+ def Report(self, files, testcase, check_sanity=False):
+ reports = self.GetReports(files)
+ for report in reports:
+ print >>self.out, report
+ if len(reports) > 0:
+ logging.info("RaceVerifier pass 1 of 2, found %i reports" % len(reports))
+ return -1
+ return 0
+
+ def CloseOutputFile(self):
+ self.out.close()
+
+
+class ThreadSanitizerRV1Mixin(object):
+ """RaceVerifier first pass.
+
+ Runs ThreadSanitizer as usual, but hides race reports and collects them in a
+ temporary file"""
+
+ def __init__(self):
+ super(ThreadSanitizerRV1Mixin, self).__init__()
+ self.RegisterOptionParserHook(ThreadSanitizerRV1Mixin.ExtendOptionParser)
+
+ def ExtendOptionParser(self, parser):
+ parser.set_defaults(hybrid="yes")
+
+ def CreateAnalyzer(self):
+ use_gdb = common.IsMac()
+ self.analyzer = ThreadSanitizerRV1Analyzer(self._source_dir, use_gdb)
+ return self.analyzer
+
+ def Cleanup(self):
+ super(ThreadSanitizerRV1Mixin, self).Cleanup()
+ self.analyzer.CloseOutputFile()
+
+
+class ThreadSanitizerRV2Mixin(object):
+ """RaceVerifier second pass."""
+
+ def __init__(self):
+ super(ThreadSanitizerRV2Mixin, self).__init__()
+ self.RegisterOptionParserHook(ThreadSanitizerRV2Mixin.ExtendOptionParser)
+
+ def ExtendOptionParser(self, parser):
+ parser.add_option("", "--race-verifier-sleep-ms",
+ dest="race_verifier_sleep_ms", default=10,
+ help="duration of RaceVerifier delays")
+
+ def ToolSpecificFlags(self):
+ proc = super(ThreadSanitizerRV2Mixin, self).ToolSpecificFlags()
+ proc += ['--race-verifier=%s' % ThreadSanitizerRV1Analyzer.TMP_FILE,
+ '--race-verifier-sleep-ms=%d' %
+ int(self._options.race_verifier_sleep_ms)]
+ return proc
+
+ def Cleanup(self):
+ super(ThreadSanitizerRV2Mixin, self).Cleanup()
+ os.unlink(ThreadSanitizerRV1Analyzer.TMP_FILE)
+
+
+class ThreadSanitizerRV1Posix(ThreadSanitizerRV1Mixin, ThreadSanitizerPosix):
+ pass
+
+
+class ThreadSanitizerRV2Posix(ThreadSanitizerRV2Mixin, ThreadSanitizerPosix):
+ pass
+
+
+class ThreadSanitizerRV1Windows(ThreadSanitizerRV1Mixin,
+ ThreadSanitizerWindows):
+ pass
+
+
+class ThreadSanitizerRV2Windows(ThreadSanitizerRV2Mixin,
+ ThreadSanitizerWindows):
+ pass
+
+
+class RaceVerifier(object):
+ """Runs tests under RaceVerifier/Valgrind."""
+
+ MORE_INFO_URL = "http://code.google.com/p/data-race-test/wiki/RaceVerifier"
+
+ def RV1Factory(self):
+ if common.IsWindows():
+ return ThreadSanitizerRV1Windows()
+ else:
+ return ThreadSanitizerRV1Posix()
+
+ def RV2Factory(self):
+ if common.IsWindows():
+ return ThreadSanitizerRV2Windows()
+ else:
+ return ThreadSanitizerRV2Posix()
+
+ def ToolName(self):
+ return "tsan"
+
+ def Main(self, args, check_sanity, min_runtime_in_seconds):
+ logging.info("Running a TSan + RaceVerifier test. For more information, " +
+ "see " + self.MORE_INFO_URL)
+ cmd1 = self.RV1Factory()
+ ret = cmd1.Main(args, check_sanity, min_runtime_in_seconds)
+ # Verify race reports, if there are any.
+ if ret == -1:
+ logging.info("Starting pass 2 of 2. Running the same binary in " +
+ "RaceVerifier mode to confirm possible race reports.")
+ logging.info("For more information, see " + self.MORE_INFO_URL)
+ cmd2 = self.RV2Factory()
+ ret = cmd2.Main(args, check_sanity, min_runtime_in_seconds)
+ else:
+ logging.info("No reports, skipping RaceVerifier second pass")
+ logging.info("Please see " + self.MORE_INFO_URL + " for more information " +
+ "on RaceVerifier")
+ return ret
+
+ def Run(self, args, module, min_runtime_in_seconds=0):
+ return self.Main(args, False, min_runtime_in_seconds)
+
+
+class ToolFactory:
+ def Create(self, tool_name):
+ if tool_name == "memcheck":
+ return Memcheck()
+ if tool_name == "tsan":
+ if common.IsWindows():
+ return ThreadSanitizerWindows()
+ else:
+ return ThreadSanitizerPosix()
+ if tool_name == "drmemory" or tool_name == "drmemory_light":
+ # TODO(timurrrr): remove support for "drmemory" when buildbots are
+ # switched to drmemory_light OR make drmemory==drmemory_full the default
+ # mode when the tool is mature enough.
+ return DrMemory(False, False)
+ if tool_name == "drmemory_full":
+ return DrMemory(True, False)
+ if tool_name == "drmemory_pattern":
+ return DrMemory(False, True)
+ if tool_name == "tsan_rv":
+ return RaceVerifier()
+ try:
+ platform_name = common.PlatformNames()[0]
+ except common.NotImplementedError:
+ platform_name = sys.platform + "(Unknown)"
+ raise RuntimeError, "Unknown tool (tool=%s, platform=%s)" % (tool_name,
+ platform_name)
+
+def CreateTool(tool):
+ return ToolFactory().Create(tool)
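+
+# Illustrative usage (tool and binary names are hypothetical):
+#   tool = CreateTool("memcheck")
+#   tool.Run(["./out/Debug/base_unittests"], "base")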
diff --git a/tools/valgrind/waterfall.sh b/tools/valgrind/waterfall.sh
new file mode 100755
index 0000000..1ae6733
--- /dev/null
+++ b/tools/valgrind/waterfall.sh
@@ -0,0 +1,222 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can be used by waterfall sheriffs to fetch the status
+# of Valgrind bots on the memory waterfall and test if their local
+# suppressions match the reports on the waterfall.
+
+set -e
+
+THISDIR=$(dirname "${0}")
+LOGS_DIR=$THISDIR/waterfall.tmp
+WATERFALL_PAGE="http://build.chromium.org/p/chromium.memory/builders"
+WATERFALL_FYI_PAGE="http://build.chromium.org/p/chromium.memory.fyi/builders"
+
+download() {
+ # Download a file.
+ # $1 = URL to download
+ # $2 = Path to the output file
+ # {{{1
+ if [ "$(which curl)" != "" ]
+ then
+ if ! curl -s -o "$2" "$1"
+ then
+ echo
+ echo "Failed to download '$1'... aborting"
+ exit 1
+ fi
+ elif [ "$(which wget)" != "" ]
+ then
+ if ! wget "$1" -O "$2" -q
+ then
+ echo
+ echo "Failed to download '$1'... aborting"
+ exit 1
+ fi
+ else
+ echo "Need either curl or wget to download stuff... aborting"
+ exit 1
+ fi
+ # }}}
+}
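+
+# Example (hypothetical standalone use): fetch the builders page into the
+# temporary log directory.
+#   download "$WATERFALL_PAGE" "$LOGS_DIR/builders"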
+
+fetch_logs() {
+ # Fetch Valgrind logs from the waterfall {{{1
+
+ # TODO(timurrrr,maruel): use JSON, see
+ # http://build.chromium.org/p/chromium.memory/json/help
+
+ rm -rf "$LOGS_DIR" # Delete old logs
+ mkdir "$LOGS_DIR"
+
+ echo "Fetching the list of builders..."
+ download $1 "$LOGS_DIR/builders"
+ SLAVES=$(grep "<a href=\"builders\/" "$LOGS_DIR/builders" | \
+ grep 'td class="box"' | \
+ sed "s/.*<a href=\"builders\///" | sed "s/\".*//" | \
+ sort | uniq)
+
+ for S in $SLAVES
+ do
+ SLAVE_URL=$1/$S
+ SLAVE_NAME=$(echo $S | sed -e "s/%20/ /g" -e "s/%28/(/g" -e "s/%29/)/g")
+ echo -n "Fetching builds by slave '${SLAVE_NAME}'"
+ download $SLAVE_URL?numbuilds=${NUMBUILDS} "$LOGS_DIR/slave_${S}"
+
+ # We speed up the 'fetch' step by skipping the builds/tests which succeeded.
+ # TODO(timurrrr): OTOH, we won't be able to check
+ # if some suppression is not used anymore.
+ #
+ # The awk script here joins the lines ending with </td> to make it possible
+ # to find the failed builds.
+ LIST_OF_BUILDS=$(cat "$LOGS_DIR/slave_$S" | \
+ awk 'BEGIN { buf = "" }
+ {
+ if ($0 ~ /<\/td>/) { buf = (buf $0); }
+ else {
+ if (buf) { print buf; buf="" }
+ print $0
+ }
+ }
+ END {if (buf) print buf}' | \
+ grep "success\|failure" | \
+ head -n $NUMBUILDS | \
+ grep "failure" | \
+ grep -v "failed compile" | \
+ sed "s/.*\/builds\///" | sed "s/\".*//")
+
+ for BUILD in $LIST_OF_BUILDS
+ do
+ # We'll fetch a few tiny URLs now, let's use a temp file.
+ TMPFILE=$(mktemp -t memory_waterfall.XXXXXX)
+ download $SLAVE_URL/builds/$BUILD "$TMPFILE"
+
+ REPORT_FILE="$LOGS_DIR/report_${S}_${BUILD}"
+ rm -f $REPORT_FILE 2>/dev/null || true # make sure it doesn't exist
+
+ REPORT_URLS=$(grep -o "[0-9]\+/steps/memory.*/logs/[0-9A-F]\{16\}" \
+ "$TMPFILE" \
+ || true) # `true` is to succeed on empty output
+ FAILED_TESTS=$(grep -o "[0-9]\+/steps/memory.*/logs/[A-Za-z0-9_.]\+" \
+ "$TMPFILE" | grep -v "[0-9A-F]\{16\}" \
+ | grep -v "stdio" || true)
+
+ for REPORT in $REPORT_URLS
+ do
+ download "$SLAVE_URL/builds/$REPORT/text" "$TMPFILE"
+ echo "" >> "$TMPFILE" # Add a newline at the end
+ cat "$TMPFILE" | tr -d '\r' >> "$REPORT_FILE"
+ done
+
+ for FAILURE in $FAILED_TESTS
+ do
+ echo -n "FAILED:" >> "$REPORT_FILE"
+ echo "$FAILURE" | sed -e "s/.*\/logs\///" -e "s/\/.*//" \
+ >> "$REPORT_FILE"
+ done
+
+ rm "$TMPFILE"
+ echo $SLAVE_URL/builds/$BUILD >> "$REPORT_FILE"
+ done
+ echo " DONE"
+ done
+ # }}}
+}
+
+match_suppressions() {
+ PYTHONPATH=$THISDIR/../python/google \
+ python "$THISDIR/test_suppressions.py" $@ "$LOGS_DIR/report_"*
+}
+
+match_gtest_excludes() {
+ for PLATFORM in "Linux" "Chromium%20Mac" "Chromium%20OS" "Windows"
+ do
+ echo
+ echo "Test failures on ${PLATFORM}:" | sed "s/%20/ /"
+ grep -h -o "^FAILED:.*" -R "$LOGS_DIR"/*${PLATFORM}* | \
+ grep -v "FAILS\|FLAKY" | sort | uniq | \
+ sed -e "s/^FAILED://" -e "s/^/ /"
+ # Don't put any operators between "grep | sed" and "RESULT=$PIPESTATUS"
+ RESULT=$PIPESTATUS
+
+ if [ "$RESULT" == 1 ]
+ then
+ echo " None!"
+ else
+ echo
+ echo " Note: we don't check for failures already excluded locally yet"
+ echo " TODO(timurrrr): don't list tests we've already excluded locally"
+ fi
+ done
+ echo
+ echo "Note: we don't print FAILS/FLAKY tests and 1200s-timeout failures"
+}
+
+usage() {
+ cat <<EOF
+usage: $0 fetch|match options
+
+This script can be used by waterfall sheriffs to fetch the status
+of Valgrind bots on the memory waterfall and test if their local
+suppressions match the reports on the waterfall.
+
+OPTIONS:
+ -h Show this message
+ -n N Fetch N builds from each slave.
+
+COMMANDS:
+ fetch Fetch Valgrind logs from the memory waterfall
+ match Test the local suppression files against the downloaded logs
+
+EOF
+}
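+
+# Example sheriff workflow (the build count is illustrative):
+#   ./waterfall.sh fetch -n 5   # download reports from the last 5 builds
+#   ./waterfall.sh match        # check local suppressions against the reports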
+
+NUMBUILDS=3
+
+CMD=$1
+if [ $# != 0 ]; then
+ shift
+fi
+
+# Arguments for "match" are handled in match_suppressions
+if [ "$CMD" != "match" ]; then
+  while getopts "hn:" OPTION
+ do
+ case $OPTION in
+ h)
+ usage
+ exit
+ ;;
+ n)
+ NUMBUILDS=$OPTARG
+ ;;
+ ?)
+ usage
+ exit
+ ;;
+ esac
+ done
+ shift $((OPTIND-1))
+ if [ $# != 0 ]; then
+ usage
+ exit 1
+ fi
+fi
+
+if [ "$CMD" = "fetch" ]; then
+ echo "Fetching $NUMBUILDS builds"
+ fetch_logs $WATERFALL_PAGE
+ fetch_logs $WATERFALL_FYI_PAGE
+elif [ "$CMD" = "match" ]; then
+ match_suppressions $@
+ match_gtest_excludes
+elif [ "$CMD" = "blame" ]; then
+ echo The blame command died of bitrot. If you need it, please reimplement it.
+ echo Reimplementation is blocked on http://crbug.com/82688
+else
+ usage
+ exit 1
+fi
diff --git a/tools/vim/chromium.ycm_extra_conf.py b/tools/vim/chromium.ycm_extra_conf.py
new file mode 100644
index 0000000..2e5318b
--- /dev/null
+++ b/tools/vim/chromium.ycm_extra_conf.py
@@ -0,0 +1,234 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Autocompletion config for YouCompleteMe in Chromium.
+#
+# USAGE:
+#
+# 1. Install YCM [https://github.com/Valloric/YouCompleteMe]
+# (Googlers should check out [go/ycm])
+#
+# 2. Point to this config file in your .vimrc:
+# let g:ycm_global_ycm_extra_conf =
+# '<chrome_depot>/src/tools/vim/chromium.ycm_extra_conf.py'
+#
+# 3. Profit
+#
+#
+# Usage notes:
+#
+# * You must use ninja & clang to build Chromium.
+#
+# * You must have run gyp_chromium and built Chromium recently.
+#
+#
+# Hacking notes:
+#
+# * The purpose of this script is to construct an accurate enough command line
+# for YCM to pass to clang so it can build and extract the symbols.
+#
+# * Right now, we only pull the -I/-D/-std flags plus a few warning and
+#   codegen flags. That seems to be sufficient for everything I've used it
+#   for.
+#
+# * That whole ninja & clang thing? We could support other configs if someone
+# were willing to write the correct commands and a parser.
+#
+# * This has only been tested on gPrecise.
+
+
+import os
+import os.path
+import subprocess
+
+
+# Flags from YCM's default config.
+flags = [
+'-DUSE_CLANG_COMPLETER',
+'-std=c++11',
+'-x',
+'c++',
+]
+
+
+def PathExists(*args):
+ return os.path.exists(os.path.join(*args))
+
+
+def FindChromeSrcFromFilename(filename):
+ """Searches for the root of the Chromium checkout.
+
+ Simply checks parent directories until it finds .gclient and src/.
+
+ Args:
+ filename: (String) Path to source file being edited.
+
+ Returns:
+ (String) Path of 'src/', or None if unable to find.
+ """
+ curdir = os.path.normpath(os.path.dirname(filename))
+ while not (PathExists(curdir, 'src') and PathExists(curdir, 'src', 'DEPS')
+ and (PathExists(curdir, '.gclient')
+ or PathExists(curdir, 'src', '.git'))):
+ nextdir = os.path.normpath(os.path.join(curdir, '..'))
+ if nextdir == curdir:
+ return None
+ curdir = nextdir
+ return os.path.join(curdir, 'src')
+
+
+# Largely copied from ninja-build.vim (guess_configuration)
+def GetNinjaOutputDirectory(chrome_root):
+ """Returns <chrome_root>/<output_dir>/(Release|Debug).
+
+ The configuration chosen is the one most recently generated/built. Detects
+ a custom output_dir specified by GYP_GENERATOR_FLAGS."""
+
+ output_dir = 'out'
+ generator_flags = os.getenv('GYP_GENERATOR_FLAGS', '').split(' ')
+ for flag in generator_flags:
+ name_value = flag.split('=', 1)
+ if len(name_value) == 2 and name_value[0] == 'output_dir':
+ output_dir = name_value[1]
+
+ root = os.path.join(chrome_root, output_dir)
+ debug_path = os.path.join(root, 'Debug')
+ release_path = os.path.join(root, 'Release')
+
+ def is_release_15s_newer(test_path):
+ try:
+ debug_mtime = os.path.getmtime(os.path.join(debug_path, test_path))
+ except os.error:
+ debug_mtime = 0
+ try:
+ rel_mtime = os.path.getmtime(os.path.join(release_path, test_path))
+ except os.error:
+ rel_mtime = 0
+ return rel_mtime - debug_mtime >= 15
+
+ if is_release_15s_newer('build.ninja') or is_release_15s_newer('protoc'):
+ return release_path
+ return debug_path
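+
+# For example (values are hypothetical): with
+# GYP_GENERATOR_FLAGS="output_dir=out_ycm" and a Release build.ninja that is
+# at least 15 seconds newer than the Debug one, this returns
+# <chrome_root>/out_ycm/Release.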
+
+
+def GetClangCommandFromNinjaForFilename(chrome_root, filename):
+ """Returns the command line to build |filename|.
+
+ Asks ninja how it would build the source file. If the specified file is a
+ header, tries to find its companion source file first.
+
+ Args:
+ chrome_root: (String) Path to src/.
+ filename: (String) Path to source file being edited.
+
+ Returns:
+ (List of Strings) Command line arguments for clang.
+ """
+ if not chrome_root:
+ return []
+
+ # Generally, everyone benefits from including Chromium's src/, because all of
+ # Chromium's includes are relative to that.
+ chrome_flags = ['-I' + os.path.join(chrome_root)]
+
+  # The version of Clang used to compile Chromium can be newer than the
+  # version of libclang that YCM uses for completion, so it's possible that
+  # YCM's libclang doesn't know about some of the warning options used, which
+  # causes compilation warnings (and errors, because of '-Werror').
+ chrome_flags.append('-Wno-unknown-warning-option')
+
+ # Default file to get a reasonable approximation of the flags for a Blink
+ # file.
+ blink_root = os.path.join(chrome_root, 'third_party', 'WebKit')
+ default_blink_file = os.path.join(blink_root, 'Source', 'core', 'Init.cpp')
+
+ # Header files can't be built. Instead, try to match a header file to its
+ # corresponding source file.
+ if filename.endswith('.h'):
+ # Add config.h to Blink headers, which won't have it by default.
+ if filename.startswith(blink_root):
+ chrome_flags.append('-include')
+ chrome_flags.append(os.path.join(blink_root, 'Source', 'config.h'))
+
+ alternates = ['.cc', '.cpp']
+ for alt_extension in alternates:
+ alt_name = filename[:-2] + alt_extension
+ if os.path.exists(alt_name):
+ filename = alt_name
+ break
+ else:
+ if filename.startswith(blink_root):
+ # If this is a Blink file, we can at least try to get a reasonable
+ # approximation.
+ filename = default_blink_file
+ else:
+ # If this is a standalone .h file with no source, the best we can do is
+ # try to use the default flags.
+ return chrome_flags
+
+ out_dir = os.path.realpath(GetNinjaOutputDirectory(chrome_root))
+
+ # Ninja needs the path to the source file relative to the output build
+ # directory.
+ rel_filename = os.path.relpath(os.path.realpath(filename), out_dir)
+
+ # Ask ninja how it would build our source file.
+ p = subprocess.Popen(['ninja', '-v', '-C', out_dir, '-t',
+ 'commands', rel_filename + '^'],
+ stdout=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ if p.returncode:
+ return chrome_flags
+
+ # Ninja might execute several commands to build something. We want the last
+ # clang command.
+ clang_line = None
+ for line in reversed(stdout.split('\n')):
+ if 'clang' in line:
+ clang_line = line
+ break
+ else:
+ return chrome_flags
+
+ # Parse flags that are important for YCM's purposes.
+ for flag in clang_line.split(' '):
+ if flag.startswith('-I'):
+ # Relative paths need to be resolved, because they're relative to the
+ # output dir, not the source.
+ if flag[2] == '/':
+ chrome_flags.append(flag)
+ else:
+ abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
+ chrome_flags.append('-I' + abs_path)
+ elif flag.startswith('-std'):
+ chrome_flags.append(flag)
+ elif flag.startswith('-') and flag[1] in 'DWFfmO':
+ if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard':
+        # These flags cause libclang (3.3) to crash. Remove them until
+        # things are fixed.
+ continue
+ chrome_flags.append(flag)
+
+ return chrome_flags
+
+
+def FlagsForFile(filename):
+ """This is the main entry point for YCM. Its interface is fixed.
+
+ Args:
+ filename: (String) Path to source file being edited.
+
+ Returns:
+ (Dictionary)
+ 'flags': (List of Strings) Command line flags.
+ 'do_cache': (Boolean) True if the result should be cached.
+ """
+ chrome_root = FindChromeSrcFromFilename(filename)
+ chrome_flags = GetClangCommandFromNinjaForFilename(chrome_root,
+ filename)
+ final_flags = flags + chrome_flags
+
+ return {
+ 'flags': final_flags,
+ 'do_cache': True
+ }
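+
+# Example of the shape of the result (the path is hypothetical):
+#
+#   FlagsForFile('/work/chromium/src/base/foo.cc')
+#   # => {'flags': ['-DUSE_CLANG_COMPLETER', '-std=c++11', '-x', 'c++',
+#   #               '-I/work/chromium/src', '-Wno-unknown-warning-option', ...],
+#   #     'do_cache': True}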
diff --git a/tools/vim/clang-format.vim b/tools/vim/clang-format.vim
new file mode 100644
index 0000000..982b8d2
--- /dev/null
+++ b/tools/vim/clang-format.vim
@@ -0,0 +1,19 @@
+" Copyright (c) 2014 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+
+" Binds cmd-shift-i (on Mac) or ctrl-shift-i (elsewhere) to invoking
+" clang-format.py.
+" It will format the current selection (and if there's no selection, the
+" current line.)
+
+let s:script = expand('<sfile>:p:h') .
+ \'/../../buildtools/clang_format/script/clang-format.py'
+
+if has('mac')
+ execute "map <D-I> :pyf " . s:script . "<CR>"
+ execute "imap <D-I> <ESC>:pyf " . s:script . "<CR>i"
+else
+ execute "map <C-I> :pyf " . s:script . "<CR>"
+ execute "imap <C-I> <ESC>:pyf " . s:script . "<CR>i"
+endif
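+
+" If these chords clash with existing mappings, an alternative binding can
+" follow the same pattern, e.g. (hypothetical):
+"   execute "map <Leader>cf :pyf " . s:script . "<CR>"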
diff --git a/tools/vim/filetypes.vim b/tools/vim/filetypes.vim
new file mode 100644
index 0000000..3e7c8f9
--- /dev/null
+++ b/tools/vim/filetypes.vim
@@ -0,0 +1,9 @@
+" To get syntax highlighting and tab settings for gyp(i) and DEPS files,
+" add the following to your .vimrc file:
+" so /path/to/src/tools/vim/filetypes.vim
+
+augroup filetype
+ au! BufRead,BufNewFile *.gyp set filetype=python expandtab tabstop=2 shiftwidth=2
+ au! BufRead,BufNewFile *.gypi set filetype=python expandtab tabstop=2 shiftwidth=2
+ au! BufRead,BufNewFile DEPS set filetype=python expandtab tabstop=2 shiftwidth=2
+augroup END
diff --git a/tools/vim/ninja-build.vim b/tools/vim/ninja-build.vim
new file mode 100644
index 0000000..852c5a9
--- /dev/null
+++ b/tools/vim/ninja-build.vim
@@ -0,0 +1,129 @@
+" Copyright (c) 2012 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+"
+" Adds a "Compile this file" function, using ninja. On Mac, binds Cmd-k to
+" this command. On Windows, Ctrl-F7 (which is the same as the VS default).
+" On Linux, <Leader>o, which is \o by default ("o"=creates .o files)
+"
+" Adds a "Build this target" function, using ninja. This is not bound
+" to any key by default, but can be used via the :CrBuild command.
+" It builds 'chrome' by default, but :CrBuild target1 target2 etc works as well.
+"
+" Requires that gyp has already generated build.ninja files, and that ninja is
+" in your path (which it is automatically if depot_tools is in your path).
+"
+" Add the following to your .vimrc file:
+" so /path/to/src/tools/vim/ninja-build.vim
+
+python << endpython
+import os
+import sys
+import vim
+
+
+def path_to_current_buffer():
+ """Returns the absolute path of the current buffer."""
+ return vim.current.buffer.name
+
+
+def path_to_source_root():
+ """Returns the absolute path to the chromium source root."""
+ candidate = os.path.dirname(path_to_current_buffer())
+  # This is a list of directories used to identify the src directory. The shorter
+ # it is, the more likely it's wrong (checking for just "build/common.gypi"
+ # would find "src/v8" for files below "src/v8", as "src/v8/build/common.gypi"
+ # exists). The longer it is, the more likely it is to break when we rename
+ # directories.
+ fingerprints = ['chrome', 'net', 'v8', 'build', 'skia']
+ while candidate and not all(
+ [os.path.isdir(os.path.join(candidate, fp)) for fp in fingerprints]):
+ candidate = os.path.dirname(candidate)
+ return candidate
+
+
+def guess_configuration():
+ """Default to the configuration with either a newer build.ninja or a newer
+ protoc."""
+ root = os.path.join(path_to_source_root(), 'out')
+ def is_release_15s_newer(test_path):
+ try:
+ debug_mtime = os.path.getmtime(os.path.join(root, 'Debug', test_path))
+ except os.error:
+ debug_mtime = 0
+ try:
+ rel_mtime = os.path.getmtime(os.path.join(root, 'Release', test_path))
+ except os.error:
+ rel_mtime = 0
+ return rel_mtime - debug_mtime >= 15
+ configuration = 'Debug'
+ if is_release_15s_newer('build.ninja') or is_release_15s_newer('protoc'):
+ configuration = 'Release'
+ return configuration
+
+
+def compute_ninja_command_for_current_buffer(configuration=None):
+ """Returns the shell command to compile the file in the current buffer."""
+ if not configuration: configuration = guess_configuration()
+ build_dir = os.path.join(path_to_source_root(), 'out', configuration)
+
+ # ninja needs filepaths for the ^ syntax to be relative to the
+ # build directory.
+ file_to_build = path_to_current_buffer()
+ file_to_build = os.path.relpath(file_to_build, build_dir)
+
+ build_cmd = ' '.join(['ninja', '-C', build_dir, file_to_build + '^'])
+ if sys.platform == 'win32':
+ # Escape \ for Vim, and ^ for both Vim and shell.
+ build_cmd = build_cmd.replace('\\', '\\\\').replace('^', '^^^^')
+ vim.command('return "%s"' % build_cmd)
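+
+# As an illustration (paths are hypothetical), editing src/base/foo.cc with a
+# Debug configuration produces roughly:
+#   ninja -C /path/to/src/out/Debug ../../base/foo.cc^
+# where the trailing '^' asks ninja to build the outputs of that source file.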
+
+
+def compute_ninja_command_for_targets(targets='', configuration=None):
+ if not configuration: configuration = guess_configuration()
+ build_dir = os.path.join(path_to_source_root(), 'out', configuration)
+ build_cmd = ' '.join(['ninja', '-C', build_dir, targets])
+ vim.command('return "%s"' % build_cmd)
+endpython
+
+fun! s:MakeWithCustomCommand(build_cmd)
+ let l:oldmakepgr = &makeprg
+ let &makeprg=a:build_cmd
+ silent make | cwindow
+ if !has('gui_running')
+ redraw!
+ endif
+ let &makeprg = l:oldmakepgr
+endfun
+
+fun! s:NinjaCommandForCurrentBuffer()
+ python compute_ninja_command_for_current_buffer()
+endfun
+
+fun! s:NinjaCommandForTargets(targets)
+ python compute_ninja_command_for_targets(vim.eval('a:targets'))
+endfun
+
+fun! CrCompileFile()
+ call s:MakeWithCustomCommand(s:NinjaCommandForCurrentBuffer())
+endfun
+
+fun! CrBuild(...)
+ let l:targets = a:0 > 0 ? join(a:000, ' ') : ''
+ if (l:targets !~ '\i')
+ let l:targets = 'chrome'
+ endif
+ call s:MakeWithCustomCommand(s:NinjaCommandForTargets(l:targets))
+endfun
+
+command! CrCompileFile call CrCompileFile()
+command! -nargs=* CrBuild call CrBuild(<q-args>)
+
+if has('mac')
+ map <D-k> :CrCompileFile<cr>
+ imap <D-k> <esc>:CrCompileFile<cr>
+elseif has('win32')
+ map <C-F7> :CrCompileFile<cr>
+ imap <C-F7> <esc>:CrCompileFile<cr>
+elseif has('unix')
+ map <Leader>o :CrCompileFile<cr>
+endif
diff --git a/tools/xdisplaycheck/BUILD.gn b/tools/xdisplaycheck/BUILD.gn
new file mode 100644
index 0000000..4a3ad3c
--- /dev/null
+++ b/tools/xdisplaycheck/BUILD.gn
@@ -0,0 +1,13 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+executable("xdisplaycheck") {
+ sources = [
+ "xdisplaycheck.cc",
+ ]
+
+ configs += [
+ "//build/config/linux:x11"
+ ]
+}
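+
+# To build this target with GN + ninja (the output directory name is
+# illustrative):
+#   gn gen out/Default
+#   ninja -C out/Default xdisplaycheck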
diff --git a/tools/xdisplaycheck/xdisplaycheck.cc b/tools/xdisplaycheck/xdisplaycheck.cc
new file mode 100644
index 0000000..6623153
--- /dev/null
+++ b/tools/xdisplaycheck/xdisplaycheck.cc
@@ -0,0 +1,119 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This is a small program that tries to connect to the X server. It
+// continually retries until it connects or 30 seconds pass. If it fails
+// to connect to the X server or fails to find needed functionality, it
+// returns an error code of -1.
+//
+// This is to help verify that a useful X server is available before we
+// start running tests on the build bots.
+
+#include <errno.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <X11/Xlib.h>
+
+#if defined(USE_AURA)
+#include <X11/extensions/XInput2.h>
+#endif
+
+void Sleep(int duration_ms) {
+ struct timespec sleep_time, remaining;
+
+ // Contains the portion of duration_ms >= 1 sec.
+ sleep_time.tv_sec = duration_ms / 1000;
+ duration_ms -= sleep_time.tv_sec * 1000;
+
+ // Contains the portion of duration_ms < 1 sec.
+ sleep_time.tv_nsec = duration_ms * 1000 * 1000; // nanoseconds.
+
+ while (nanosleep(&sleep_time, &remaining) == -1 && errno == EINTR)
+ sleep_time = remaining;
+}
+
+class XScopedDisplay {
+ public:
+ XScopedDisplay() : display_(NULL) {}
+ ~XScopedDisplay() {
+ if (display_) XCloseDisplay(display_);
+ }
+
+ void set(Display* display) { display_ = display; }
+ Display* display() { return display_; }
+
+ private:
+ Display* display_;
+};
+
+int main(int argc, char* argv[]) {
+ XScopedDisplay scoped_display;
+ if (argv[1] && strcmp(argv[1], "--noserver") == 0) {
+ scoped_display.set(XOpenDisplay(NULL));
+ if (scoped_display.display()) {
+ fprintf(stderr, "Found unexpected connectable display %s\n",
+ XDisplayName(NULL));
+ }
+    // Return success when we find an unexpected display so that the code
+    // path without --noserver behaves the same (just slower) rather than
+    // being inverted.
+ return !scoped_display.display();
+ }
+
+ int kNumTries = 78; // 78*77/2 * 10 = 30s of waiting
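+  // (Each attempt below sleeps 10*tries ms, so the worst case is
+  // 10 * (0 + 1 + ... + 77) = 10 * 77*78/2 = 30,030 ms, roughly 30 seconds.)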
+ int tries;
+ for (tries = 0; tries < kNumTries; ++tries) {
+ scoped_display.set(XOpenDisplay(NULL));
+ if (scoped_display.display())
+ break;
+ Sleep(10 * tries);
+ }
+
+ if (!scoped_display.display()) {
+ fprintf(stderr, "Failed to connect to %s\n", XDisplayName(NULL));
+ return -1;
+ }
+
+ fprintf(stderr, "Connected after %d retries\n", tries);
+
+#if defined(USE_AURA)
+ // Check for XInput2
+ int opcode, event, err;
+ if (!XQueryExtension(scoped_display.display(), "XInputExtension", &opcode,
+ &event, &err)) {
+ fprintf(stderr,
+ "Failed to get XInputExtension on %s.\n", XDisplayName(NULL));
+ return -2;
+ }
+
+ int major = 2, minor = 0;
+ if (XIQueryVersion(scoped_display.display(), &major, &minor) == BadRequest) {
+ fprintf(stderr,
+ "Server does not have XInput2 on %s.\n", XDisplayName(NULL));
+ return -3;
+ }
+
+ // Ask for the list of devices. This can cause some Xvfb to crash.
+ int count = 0;
+ XIDeviceInfo* devices =
+ XIQueryDevice(scoped_display.display(), XIAllDevices, &count);
+ if (devices)
+ XIFreeDeviceInfo(devices);
+
+ fprintf(stderr,
+ "XInput2 verified initially sane on %s.\n", XDisplayName(NULL));
+#endif
+ return 0;
+}
+
+#if defined(LEAK_SANITIZER)
+// XOpenDisplay leaks memory if it takes more than one try to connect. This
+// causes LSan bots to fail. We don't care about memory leaks in xdisplaycheck
+// anyway, so just disable LSan completely.
+// This function isn't referenced from the executable itself. Make sure it isn't
+// stripped by the linker.
+__attribute__((used))
+__attribute__((visibility("default")))
+extern "C" int __lsan_is_turned_off() { return 1; }
+#endif
diff --git a/tools/xdisplaycheck/xdisplaycheck.gyp b/tools/xdisplaycheck/xdisplaycheck.gyp
new file mode 100644
index 0000000..7030b6f
--- /dev/null
+++ b/tools/xdisplaycheck/xdisplaycheck.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ # GN version: //tools/xdisplaycheck
+ 'target_name': 'xdisplaycheck',
+ 'type': 'executable',
+ 'dependencies': [
+ '../../build/linux/system.gyp:x11',
+ ],
+ 'sources': [
+ 'xdisplaycheck.cc',
+ ],
+ },
+ ],
+}