Squashed 'third_party/googletest/' content from commit f570b27

Change-Id: I80d9a022f8a149322a5c8d60dddc70ae74c22ce2
git-subtree-dir: third_party/googletest
git-subtree-split: f570b27e15a4e921d59495622a82277a3e1e8f87
diff --git a/googlemock/scripts/fuse_gmock_files.py b/googlemock/scripts/fuse_gmock_files.py
new file mode 100755
index 0000000..fc0baf7
--- /dev/null
+++ b/googlemock/scripts/fuse_gmock_files.py
@@ -0,0 +1,240 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""fuse_gmock_files.py v0.1.0
+Fuses Google Mock and Google Test source code into two .h files and a .cc file.
+
+SYNOPSIS
+       fuse_gmock_files.py [GMOCK_ROOT_DIR] OUTPUT_DIR
+
+       Scans GMOCK_ROOT_DIR for Google Mock and Google Test source
+       code, assuming Google Test is in the GMOCK_ROOT_DIR/gtest
+       sub-directory, and generates three files:
+       OUTPUT_DIR/gtest/gtest.h, OUTPUT_DIR/gmock/gmock.h, and
+       OUTPUT_DIR/gmock-gtest-all.cc.  Then you can build your tests
+       by adding OUTPUT_DIR to the include search path and linking
+       with OUTPUT_DIR/gmock-gtest-all.cc.  These three files contain
+       everything you need to use Google Mock.  Hence you can
+       "install" Google Mock by copying them to wherever you want.
+
+       GMOCK_ROOT_DIR can be omitted and defaults to the parent
+       directory of the directory holding this script.
+
+EXAMPLES
+       ./fuse_gmock_files.py fused_gmock
+       ./fuse_gmock_files.py path/to/unpacked/gmock fused_gmock
+
+This tool is experimental.  In particular, it assumes that there is no
+conditional inclusion of Google Mock or Google Test headers.  Please
+report any problems to googlemock@googlegroups.com.  You can read
+http://code.google.com/p/googlemock/wiki/CookBook for more
+information.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import re
+import sets
+import sys
+
+# We assume that this file is in the scripts/ directory in the Google
+# Mock root directory.
+DEFAULT_GMOCK_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
+
+# We need to call into gtest/scripts/fuse_gtest_files.py.
+sys.path.append(os.path.join(DEFAULT_GMOCK_ROOT_DIR, 'gtest/scripts'))
+import fuse_gtest_files
+gtest = fuse_gtest_files
+
+# Regex for matching '#include "gmock/..."'.
+INCLUDE_GMOCK_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gmock/.+)"')
+
+# Where to find the source seed files.
+GMOCK_H_SEED = 'include/gmock/gmock.h'
+GMOCK_ALL_CC_SEED = 'src/gmock-all.cc'
+
+# Where to put the generated files.
+GTEST_H_OUTPUT = 'gtest/gtest.h'
+GMOCK_H_OUTPUT = 'gmock/gmock.h'
+GMOCK_GTEST_ALL_CC_OUTPUT = 'gmock-gtest-all.cc'
+
+
+def GetGTestRootDir(gmock_root):
+  """Returns the root directory of Google Test."""
+
+  return os.path.join(gmock_root, 'gtest')
+
+
+def ValidateGMockRootDir(gmock_root):
+  """Makes sure gmock_root points to a valid gmock root directory.
+
+  The function aborts the program on failure.
+  """
+
+  gtest.ValidateGTestRootDir(GetGTestRootDir(gmock_root))
+  gtest.VerifyFileExists(gmock_root, GMOCK_H_SEED)
+  gtest.VerifyFileExists(gmock_root, GMOCK_ALL_CC_SEED)
+
+
+def ValidateOutputDir(output_dir):
+  """Makes sure output_dir points to a valid output directory.
+
+  The function aborts the program on failure.
+  """
+
+  gtest.VerifyOutputFile(output_dir, gtest.GTEST_H_OUTPUT)
+  gtest.VerifyOutputFile(output_dir, GMOCK_H_OUTPUT)
+  gtest.VerifyOutputFile(output_dir, GMOCK_GTEST_ALL_CC_OUTPUT)
+
+
+def FuseGMockH(gmock_root, output_dir):
+  """Scans folder gmock_root to generate gmock/gmock.h in output_dir."""
+
+  output_file = file(os.path.join(output_dir, GMOCK_H_OUTPUT), 'w')
+  processed_files = sets.Set()  # Holds all gmock headers we've processed.
+
+  def ProcessFile(gmock_header_path):
+    """Processes the given gmock header file."""
+
+    # We don't process the same header twice.
+    if gmock_header_path in processed_files:
+      return
+
+    processed_files.add(gmock_header_path)
+
+    # Reads each line in the given gmock header.
+    for line in file(os.path.join(gmock_root, gmock_header_path), 'r'):
+      m = INCLUDE_GMOCK_FILE_REGEX.match(line)
+      if m:
+        # It's '#include "gmock/..."' - let's process it recursively.
+        ProcessFile('include/' + m.group(1))
+      else:
+        m = gtest.INCLUDE_GTEST_FILE_REGEX.match(line)
+        if m:
+          # It's '#include "gtest/foo.h"'.  We translate it to
+          # "gtest/gtest.h", regardless of what foo is, since all
+          # gtest headers are fused into gtest/gtest.h.
+
+          # There is no need to #include gtest.h twice.
+          if not gtest.GTEST_H_SEED in processed_files:
+            processed_files.add(gtest.GTEST_H_SEED)
+            output_file.write('#include "%s"\n' % (gtest.GTEST_H_OUTPUT,))
+        else:
+          # Otherwise we copy the line unchanged to the output file.
+          output_file.write(line)
+
+  ProcessFile(GMOCK_H_SEED)
+  output_file.close()
+
+
+def FuseGMockAllCcToFile(gmock_root, output_file):
+  """Scans folder gmock_root to fuse gmock-all.cc into output_file."""
+
+  processed_files = sets.Set()
+
+  def ProcessFile(gmock_source_file):
+    """Processes the given gmock source file."""
+
+    # We don't process the same #included file twice.
+    if gmock_source_file in processed_files:
+      return
+
+    processed_files.add(gmock_source_file)
+
+    # Reads each line in the given gmock source file.
+    for line in file(os.path.join(gmock_root, gmock_source_file), 'r'):
+      m = INCLUDE_GMOCK_FILE_REGEX.match(line)
+      if m:
+        # It's '#include "gmock/foo.h"'.  We treat it as '#include
+        # "gmock/gmock.h"', as all other gmock headers are being fused
+        # into gmock.h and cannot be #included directly.
+
+        # There is no need to #include "gmock/gmock.h" more than once.
+        if not GMOCK_H_SEED in processed_files:
+          processed_files.add(GMOCK_H_SEED)
+          output_file.write('#include "%s"\n' % (GMOCK_H_OUTPUT,))
+      else:
+        m = gtest.INCLUDE_GTEST_FILE_REGEX.match(line)
+        if m:
+          # It's '#include "gtest/..."'.
+          # There is no need to #include gtest.h as it has been
+          # #included by gtest-all.cc.
+          pass
+        else:
+          m = gtest.INCLUDE_SRC_FILE_REGEX.match(line)
+          if m:
+            # It's '#include "src/foo"' - let's process it recursively.
+            ProcessFile(m.group(1))
+          else:
+            # Otherwise we copy the line unchanged to the output file.
+            output_file.write(line)
+
+  ProcessFile(GMOCK_ALL_CC_SEED)
+
+
+def FuseGMockGTestAllCc(gmock_root, output_dir):
+  """Scans folder gmock_root to generate gmock-gtest-all.cc in output_dir."""
+
+  output_file = file(os.path.join(output_dir, GMOCK_GTEST_ALL_CC_OUTPUT), 'w')
+  # First, fuse gtest-all.cc into gmock-gtest-all.cc.
+  gtest.FuseGTestAllCcToFile(GetGTestRootDir(gmock_root), output_file)
+  # Next, append fused gmock-all.cc to gmock-gtest-all.cc.
+  FuseGMockAllCcToFile(gmock_root, output_file)
+  output_file.close()
+
+
+def FuseGMock(gmock_root, output_dir):
+  """Fuses gtest.h, gmock.h, and gmock-gtest-all.h."""
+
+  ValidateGMockRootDir(gmock_root)
+  ValidateOutputDir(output_dir)
+
+  gtest.FuseGTestH(GetGTestRootDir(gmock_root), output_dir)
+  FuseGMockH(gmock_root, output_dir)
+  FuseGMockGTestAllCc(gmock_root, output_dir)
+
+
+def main():
+  argc = len(sys.argv)
+  if argc == 2:
+    # fuse_gmock_files.py OUTPUT_DIR
+    FuseGMock(DEFAULT_GMOCK_ROOT_DIR, sys.argv[1])
+  elif argc == 3:
+    # fuse_gmock_files.py GMOCK_ROOT_DIR OUTPUT_DIR
+    FuseGMock(sys.argv[1], sys.argv[2])
+  else:
+    print __doc__
+    sys.exit(1)
+
+
+if __name__ == '__main__':
+  main()
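
As a sketch of how fuse_gmock_files.py above might be driven programmatically
rather than via the command line shown in its SYNOPSIS: the directory names
below are placeholders, and the snippet assumes the scripts directory is on
sys.path and that it runs under the same Python 2 interpreter the script
itself targets.

    # Hypothetical usage sketch; paths are illustrative, not part of the script.
    import fuse_gmock_files

    gmock_root = 'path/to/gmock'   # must contain a gtest/ sub-directory
    output_dir = 'fused_gmock'
    fuse_gmock_files.FuseGMock(gmock_root, output_dir)
    # Expected outputs: fused_gmock/gtest/gtest.h, fused_gmock/gmock/gmock.h,
    # and fused_gmock/gmock-gtest-all.cc, ready to add to the include path and
    # compile into a test binary.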
diff --git a/googlemock/scripts/generator/LICENSE b/googlemock/scripts/generator/LICENSE
new file mode 100644
index 0000000..87ea063
--- /dev/null
+++ b/googlemock/scripts/generator/LICENSE
@@ -0,0 +1,203 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [2007] Neal Norwitz
+   Portions Copyright [2007] Google Inc.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/googlemock/scripts/generator/README b/googlemock/scripts/generator/README
new file mode 100644
index 0000000..d6f9597
--- /dev/null
+++ b/googlemock/scripts/generator/README
@@ -0,0 +1,35 @@
+
+The Google Mock class generator is an application that is part of cppclean.
+For more information about cppclean, see the README.cppclean file or
+visit http://code.google.com/p/cppclean/
+
+cppclean requires Python 2.3.5 or later.  If you don't have Python installed
+on your system, you will also need to install it.  You can download Python
+from:  http://www.python.org/download/releases/
+
+To use the Google Mock class generator, run it from the command line,
+passing the header file and the class for which you want to generate
+a Google Mock class.
+
+Make sure to install the scripts somewhere in your path.  Then you can
+run the program.
+
+  gmock_gen.py header-file.h [ClassName]...
+
+If no ClassNames are specified, all classes in the file are emitted.
+
+To change the indentation from the default of 2, set INDENT in
+the environment.  For example, to use an indent of 4 spaces:
+
+INDENT=4 gmock_gen.py header-file.h ClassName
+
+This version was made from SVN revision 281 in the cppclean repository.
+
+Known Limitations
+-----------------
+Not all code will be generated properly.  For example, when mocking templated
+classes, the template information is lost.  You will need to add the template
+information manually.
+
+Not all permutations of using multiple pointers/references will be rendered
+properly.  These will also have to be fixed manually.
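
As a rough illustration of the command-line usage described in the README
above, the snippet below drives gmock_gen.py from Python.  The header name and
class name are placeholders, and subprocess.check_output assumes Python 2.7 or
later.

    # Hypothetical invocation sketch; "painter.h" and "Painter" are examples only.
    import os
    import subprocess

    env = dict(os.environ, INDENT='4')   # override the default indent of 2
    mock_text = subprocess.check_output(
        ['python', 'gmock_gen.py', 'painter.h', 'Painter'], env=env)
    print(mock_text)  # emits the mock class body to paste into a test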
diff --git a/googlemock/scripts/generator/README.cppclean b/googlemock/scripts/generator/README.cppclean
new file mode 100644
index 0000000..65431b6
--- /dev/null
+++ b/googlemock/scripts/generator/README.cppclean
@@ -0,0 +1,115 @@
+Goal:
+-----
+  CppClean attempts to find problems in C++ source that slow development
+  in large code bases, for example various forms of unused code.
+  Unused code ranges from unused functions, methods, data members, and types
+  to unnecessary #include directives.  Unnecessary #includes can cause
+  considerable extra compilation, lengthening the edit-compile-run cycle.
+
+  The project home page is:   http://code.google.com/p/cppclean/
+
+
+Features:
+---------
+ * Find and print C++ language constructs: classes, methods, functions, etc.
+ * Find classes with virtual methods, no virtual destructor, and no bases
+ * Find global/static data that are potential problems when using threads
+ * Unnecessary forward class declarations
+ * Unnecessary function declarations
+ * Undeclared function definitions
+ * (planned) Find unnecessary header files #included
+   - No direct reference to anything in the header
+   - Header is unnecessary if classes were forward declared instead
+ * (planned) Source files that reference headers not directly #included,
+   i.e., files that rely on a transitive #include from another header
+ * (planned) Unused (private, protected, & public) member methods and data
+ * (planned) Store AST in a SQL database so relationships can be queried
+
+AST is Abstract Syntax Tree, a representation of parsed source code.
+http://en.wikipedia.org/wiki/Abstract_syntax_tree
+
+
+System Requirements:
+--------------------
+ * Python 2.4 or later (2.3 probably works too)
+ * Works on Windows (untested), Mac OS X, and Unix
+
+
+How to Run:
+-----------
+  For all examples, it is assumed that cppclean resides in a directory called
+  /cppclean.
+
+  To print warnings for classes with virtual methods, no virtual destructor and
+  no base classes:
+
+      /cppclean/run.sh nonvirtual_dtors.py file1.h file2.h file3.cc ...
+
+  To print all the functions defined in header file(s):
+
+      /cppclean/run.sh functions.py file1.h file2.h ...
+
+  All the commands take multiple files on the command line.  Other programs
+  include: find_warnings, headers, methods, and types.  Some other programs
+  are available, but used primarily for debugging.
+
+  run.sh is a simple wrapper that sets PYTHONPATH to /cppclean and then
+  runs the program in /cppclean/cpp/PROGRAM.py.  There is currently
+  no equivalent for Windows.  Contributions for a run.bat file
+  would be greatly appreciated.
+
+
+How to Configure:
+-----------------
+  You can add a siteheaders.py file in /cppclean/cpp to configure where
+  to look for other headers (typically -I options passed to a compiler).
+  Currently two values are supported:  _TRANSITIVE and GetIncludeDirs.
+  _TRANSITIVE should be set to a boolean value (True or False) indicating
+  whether to transitively process all header files.  The default is False.
+
+  GetIncludeDirs is a function that takes a single argument and returns
+  a sequence of directories to include.  This can be a generator or
+  return a static list.
+
+      def GetIncludeDirs(filename):
+          return ['/some/path/with/other/headers']
+
+      # Here is a more complicated example.
+      def GetIncludeDirs(filename):
+          yield '/path1'
+          yield os.path.join('/path2', os.path.dirname(filename))
+          yield '/path3'
+
+
+How to Test:
+------------
+  For all examples, it is assumed that cppclean resides in a directory called
+  /cppclean.  To run the tests:
+
+  cd /cppclean
+  make test
+  # To generate expected results after a change:
+  make expected
+
+
+Current Status:
+---------------
+  The parser works pretty well for header files, parsing about 99% of Google's
+  header files.  Anything that inspects the structure of C++ source files
+  should work reasonably well.  Function bodies are not transformed to an AST,
+  but left as tokens.  Much work is still needed on finding unused header files
+  and storing an AST in a database.
+
+
+Non-goals:
+----------
+ * Parsing all valid C++ source
+ * Handling invalid C++ source gracefully
+ * Compiling to machine code (or anything beyond an AST)
+
+
+Contact:
+--------
+  If you use cppclean, I would love to hear about your experiences at
+  cppclean@googlegroups.com.  Even if you don't use cppclean, I'd like to
+  hear from you.  :-)  (You can contact me directly at:  nnorwitz@gmail.com)
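
Tying together the two settings described in the How to Configure section of
README.cppclean above, a complete siteheaders.py might look like the sketch
below; the include paths are placeholders and both values are optional.

    # Hypothetical /cppclean/cpp/siteheaders.py; paths are illustrative only.

    # Process #included headers transitively (the default is False).
    _TRANSITIVE = True

    def GetIncludeDirs(filename):
        # Mirror the -I directories your build would pass to the compiler.
        return ['include', 'third_party/include']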
diff --git a/googlemock/scripts/generator/cpp/__init__.py b/googlemock/scripts/generator/cpp/__init__.py
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/googlemock/scripts/generator/cpp/__init__.py
diff --git a/googlemock/scripts/generator/cpp/ast.py b/googlemock/scripts/generator/cpp/ast.py
new file mode 100755
index 0000000..11cbe91
--- /dev/null
+++ b/googlemock/scripts/generator/cpp/ast.py
@@ -0,0 +1,1733 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Neal Norwitz
+# Portions Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generate an Abstract Syntax Tree (AST) for C++."""
+
+__author__ = 'nnorwitz@google.com (Neal Norwitz)'
+
+
+# TODO:
+#  * Tokens should never be exported, need to convert to Nodes
+#    (return types, parameters, etc.)
+#  * Handle static class data for templatized classes
+#  * Handle casts (both C++ and C-style)
+#  * Handle conditions and loops (if/else, switch, for, while/do)
+#
+# TODO much, much later:
+#  * Handle #define
+#  * exceptions
+
+
+try:
+    # Python 3.x
+    import builtins
+except ImportError:
+    # Python 2.x
+    import __builtin__ as builtins
+
+import sys
+import traceback
+
+from cpp import keywords
+from cpp import tokenize
+from cpp import utils
+
+
+if not hasattr(builtins, 'reversed'):
+    # Support Python 2.3 and earlier.
+    def reversed(seq):
+        for i in range(len(seq)-1, -1, -1):
+            yield seq[i]
+
+if not hasattr(builtins, 'next'):
+    # Support Python 2.5 and earlier.
+    def next(obj):
+        return obj.next()
+
+
+VISIBILITY_PUBLIC, VISIBILITY_PROTECTED, VISIBILITY_PRIVATE = range(3)
+
+FUNCTION_NONE = 0x00
+FUNCTION_CONST = 0x01
+FUNCTION_VIRTUAL = 0x02
+FUNCTION_PURE_VIRTUAL = 0x04
+FUNCTION_CTOR = 0x08
+FUNCTION_DTOR = 0x10
+FUNCTION_ATTRIBUTE = 0x20
+FUNCTION_UNKNOWN_ANNOTATION = 0x40
+FUNCTION_THROW = 0x80
+FUNCTION_OVERRIDE = 0x100
+
+"""
+These are currently unused.  Should really handle these properly at some point.
+
+TYPE_MODIFIER_INLINE   = 0x010000
+TYPE_MODIFIER_EXTERN   = 0x020000
+TYPE_MODIFIER_STATIC   = 0x040000
+TYPE_MODIFIER_CONST    = 0x080000
+TYPE_MODIFIER_REGISTER = 0x100000
+TYPE_MODIFIER_VOLATILE = 0x200000
+TYPE_MODIFIER_MUTABLE  = 0x400000
+
+TYPE_MODIFIER_MAP = {
+    'inline': TYPE_MODIFIER_INLINE,
+    'extern': TYPE_MODIFIER_EXTERN,
+    'static': TYPE_MODIFIER_STATIC,
+    'const': TYPE_MODIFIER_CONST,
+    'register': TYPE_MODIFIER_REGISTER,
+    'volatile': TYPE_MODIFIER_VOLATILE,
+    'mutable': TYPE_MODIFIER_MUTABLE,
+    }
+"""
+
+_INTERNAL_TOKEN = 'internal'
+_NAMESPACE_POP = 'ns-pop'
+
+
+# TODO(nnorwitz): use this as a singleton for templated_types, etc
+# where we don't want to create a new empty dict each time.  It is also const.
+class _NullDict(object):
+    __contains__ = lambda self, item: False
+    keys = values = items = iterkeys = itervalues = iteritems = lambda self: ()
+
+
+# TODO(nnorwitz): move AST nodes into a separate module.
+class Node(object):
+    """Base AST node."""
+
+    def __init__(self, start, end):
+        self.start = start
+        self.end = end
+
+    def IsDeclaration(self):
+        """Returns bool if this node is a declaration."""
+        return False
+
+    def IsDefinition(self):
+        """Returns bool if this node is a definition."""
+        return False
+
+    def IsExportable(self):
+        """Returns bool if this node exportable from a header file."""
+        return False
+
+    def Requires(self, node):
+        """Does this AST node require the definition of the node passed in?"""
+        return False
+
+    def XXX__str__(self):
+        return self._StringHelper(self.__class__.__name__, '')
+
+    def _StringHelper(self, name, suffix):
+        if not utils.DEBUG:
+            return '%s(%s)' % (name, suffix)
+        return '%s(%d, %d, %s)' % (name, self.start, self.end, suffix)
+
+    def __repr__(self):
+        return str(self)
+
+
+class Define(Node):
+    def __init__(self, start, end, name, definition):
+        Node.__init__(self, start, end)
+        self.name = name
+        self.definition = definition
+
+    def __str__(self):
+        value = '%s %s' % (self.name, self.definition)
+        return self._StringHelper(self.__class__.__name__, value)
+
+
+class Include(Node):
+    def __init__(self, start, end, filename, system):
+        Node.__init__(self, start, end)
+        self.filename = filename
+        self.system = system
+
+    def __str__(self):
+        fmt = '"%s"'
+        if self.system:
+            fmt = '<%s>'
+        return self._StringHelper(self.__class__.__name__, fmt % self.filename)
+
+
+class Goto(Node):
+    def __init__(self, start, end, label):
+        Node.__init__(self, start, end)
+        self.label = label
+
+    def __str__(self):
+        return self._StringHelper(self.__class__.__name__, str(self.label))
+
+
+class Expr(Node):
+    def __init__(self, start, end, expr):
+        Node.__init__(self, start, end)
+        self.expr = expr
+
+    def Requires(self, node):
+        # TODO(nnorwitz): impl.
+        return False
+
+    def __str__(self):
+        return self._StringHelper(self.__class__.__name__, str(self.expr))
+
+
+class Return(Expr):
+    pass
+
+
+class Delete(Expr):
+    pass
+
+
+class Friend(Expr):
+    def __init__(self, start, end, expr, namespace):
+        Expr.__init__(self, start, end, expr)
+        self.namespace = namespace[:]
+
+
+class Using(Node):
+    def __init__(self, start, end, names):
+        Node.__init__(self, start, end)
+        self.names = names
+
+    def __str__(self):
+        return self._StringHelper(self.__class__.__name__, str(self.names))
+
+
+class Parameter(Node):
+    def __init__(self, start, end, name, parameter_type, default):
+        Node.__init__(self, start, end)
+        self.name = name
+        self.type = parameter_type
+        self.default = default
+
+    def Requires(self, node):
+        # TODO(nnorwitz): handle namespaces, etc.
+        return self.type.name == node.name
+
+    def __str__(self):
+        name = str(self.type)
+        suffix = '%s %s' % (name, self.name)
+        if self.default:
+            suffix += ' = ' + ''.join([d.name for d in self.default])
+        return self._StringHelper(self.__class__.__name__, suffix)
+
+
+class _GenericDeclaration(Node):
+    def __init__(self, start, end, name, namespace):
+        Node.__init__(self, start, end)
+        self.name = name
+        self.namespace = namespace[:]
+
+    def FullName(self):
+        prefix = ''
+        if self.namespace and self.namespace[-1]:
+            prefix = '::'.join(self.namespace) + '::'
+        return prefix + self.name
+
+    def _TypeStringHelper(self, suffix):
+        if self.namespace:
+            names = [n or '<anonymous>' for n in self.namespace]
+            suffix += ' in ' + '::'.join(names)
+        return self._StringHelper(self.__class__.__name__, suffix)
+
+
+# TODO(nnorwitz): merge with Parameter in some way?
+class VariableDeclaration(_GenericDeclaration):
+    def __init__(self, start, end, name, var_type, initial_value, namespace):
+        _GenericDeclaration.__init__(self, start, end, name, namespace)
+        self.type = var_type
+        self.initial_value = initial_value
+
+    def Requires(self, node):
+        # TODO(nnorwitz): handle namespaces, etc.
+        return self.type.name == node.name
+
+    def ToString(self):
+        """Return a string that tries to reconstitute the variable decl."""
+        suffix = '%s %s' % (self.type, self.name)
+        if self.initial_value:
+            suffix += ' = ' + self.initial_value
+        return suffix
+
+    def __str__(self):
+        return self._StringHelper(self.__class__.__name__, self.ToString())
+
+
+class Typedef(_GenericDeclaration):
+    def __init__(self, start, end, name, alias, namespace):
+        _GenericDeclaration.__init__(self, start, end, name, namespace)
+        self.alias = alias
+
+    def IsDefinition(self):
+        return True
+
+    def IsExportable(self):
+        return True
+
+    def Requires(self, node):
+        # TODO(nnorwitz): handle namespaces, etc.
+        name = node.name
+        for token in self.alias:
+            if token is not None and name == token.name:
+                return True
+        return False
+
+    def __str__(self):
+        suffix = '%s, %s' % (self.name, self.alias)
+        return self._TypeStringHelper(suffix)
+
+
+class _NestedType(_GenericDeclaration):
+    def __init__(self, start, end, name, fields, namespace):
+        _GenericDeclaration.__init__(self, start, end, name, namespace)
+        self.fields = fields
+
+    def IsDefinition(self):
+        return True
+
+    def IsExportable(self):
+        return True
+
+    def __str__(self):
+        suffix = '%s, {%s}' % (self.name, self.fields)
+        return self._TypeStringHelper(suffix)
+
+
+class Union(_NestedType):
+    pass
+
+
+class Enum(_NestedType):
+    pass
+
+
+class Class(_GenericDeclaration):
+    def __init__(self, start, end, name, bases, templated_types, body, namespace):
+        _GenericDeclaration.__init__(self, start, end, name, namespace)
+        self.bases = bases
+        self.body = body
+        self.templated_types = templated_types
+
+    def IsDeclaration(self):
+        return self.bases is None and self.body is None
+
+    def IsDefinition(self):
+        return not self.IsDeclaration()
+
+    def IsExportable(self):
+        return not self.IsDeclaration()
+
+    def Requires(self, node):
+        # TODO(nnorwitz): handle namespaces, etc.
+        if self.bases:
+            for token_list in self.bases:
+                # TODO(nnorwitz): bases are tokens, do name comparison.
+                for token in token_list:
+                    if token.name == node.name:
+                        return True
+        # TODO(nnorwitz): search in body too.
+        return False
+
+    def __str__(self):
+        name = self.name
+        if self.templated_types:
+            name += '<%s>' % self.templated_types
+        suffix = '%s, %s, %s' % (name, self.bases, self.body)
+        return self._TypeStringHelper(suffix)
+
+
+class Struct(Class):
+    pass
+
+
+class Function(_GenericDeclaration):
+    def __init__(self, start, end, name, return_type, parameters,
+                 modifiers, templated_types, body, namespace):
+        _GenericDeclaration.__init__(self, start, end, name, namespace)
+        converter = TypeConverter(namespace)
+        self.return_type = converter.CreateReturnType(return_type)
+        self.parameters = converter.ToParameters(parameters)
+        self.modifiers = modifiers
+        self.body = body
+        self.templated_types = templated_types
+
+    def IsDeclaration(self):
+        return self.body is None
+
+    def IsDefinition(self):
+        return self.body is not None
+
+    def IsExportable(self):
+        if self.return_type and 'static' in self.return_type.modifiers:
+            return False
+        return None not in self.namespace
+
+    def Requires(self, node):
+        if self.parameters:
+            # TODO(nnorwitz): parameters are tokens, do name comparison.
+            for p in self.parameters:
+                if p.name == node.name:
+                    return True
+        # TODO(nnorwitz): search in body too.
+        return False
+
+    def __str__(self):
+        # TODO(nnorwitz): add templated_types.
+        suffix = ('%s %s(%s), 0x%02x, %s' %
+                  (self.return_type, self.name, self.parameters,
+                   self.modifiers, self.body))
+        return self._TypeStringHelper(suffix)
+
+
+class Method(Function):
+    def __init__(self, start, end, name, in_class, return_type, parameters,
+                 modifiers, templated_types, body, namespace):
+        Function.__init__(self, start, end, name, return_type, parameters,
+                          modifiers, templated_types, body, namespace)
+        # TODO(nnorwitz): in_class could also be a namespace which can
+        # mess up finding functions properly.
+        self.in_class = in_class
+
+
+class Type(_GenericDeclaration):
+    """Type used for any variable (eg class, primitive, struct, etc)."""
+
+    def __init__(self, start, end, name, templated_types, modifiers,
+                 reference, pointer, array):
+        """
+        Args:
+          name: str name of main type
+          templated_types: [Class (Type?)] template type info between <>
+          modifiers: [str] type modifiers (keywords), e.g., const, mutable, etc.
+          reference, pointer, array: bools
+        """
+        _GenericDeclaration.__init__(self, start, end, name, [])
+        self.templated_types = templated_types
+        if not name and modifiers:
+            self.name = modifiers.pop()
+        self.modifiers = modifiers
+        self.reference = reference
+        self.pointer = pointer
+        self.array = array
+
+    def __str__(self):
+        prefix = ''
+        if self.modifiers:
+            prefix = ' '.join(self.modifiers) + ' '
+        name = str(self.name)
+        if self.templated_types:
+            name += '<%s>' % self.templated_types
+        suffix = prefix + name
+        if self.reference:
+            suffix += '&'
+        if self.pointer:
+            suffix += '*'
+        if self.array:
+            suffix += '[]'
+        return self._TypeStringHelper(suffix)
+
+    # By definition, Is* are always False.  A Type can only exist in
+    # some sort of variable declaration, parameter, or return value.
+    def IsDeclaration(self):
+        return False
+
+    def IsDefinition(self):
+        return False
+
+    def IsExportable(self):
+        return False
+
+
+class TypeConverter(object):
+
+    def __init__(self, namespace_stack):
+        self.namespace_stack = namespace_stack
+
+    def _GetTemplateEnd(self, tokens, start):
+        count = 1
+        end = start
+        while 1:
+            token = tokens[end]
+            end += 1
+            if token.name == '<':
+                count += 1
+            elif token.name == '>':
+                count -= 1
+                if count == 0:
+                    break
+        return tokens[start:end-1], end
+
+    def ToType(self, tokens):
+        """Convert [Token,...] to [Class(...), ] useful for base classes.
+        For example, given code like class Foo : public Bar<x, y> { ... };
+        the "Bar<x, y>" portion gets converted to an AST.
+
+        Returns:
+          [Class(...), ...]
+        """
+        result = []
+        name_tokens = []
+        reference = pointer = array = False
+
+        def AddType(templated_types):
+            # Partition tokens into name and modifier tokens.
+            names = []
+            modifiers = []
+            for t in name_tokens:
+                if keywords.IsKeyword(t.name):
+                    modifiers.append(t.name)
+                else:
+                    names.append(t.name)
+            name = ''.join(names)
+            if name_tokens:
+                result.append(Type(name_tokens[0].start, name_tokens[-1].end,
+                                   name, templated_types, modifiers,
+                                   reference, pointer, array))
+            del name_tokens[:]
+
+        i = 0
+        end = len(tokens)
+        while i < end:
+            token = tokens[i]
+            if token.name == '<':
+                new_tokens, new_end = self._GetTemplateEnd(tokens, i+1)
+                AddType(self.ToType(new_tokens))
+                # If there is a comma after the template, we need to consume
+                # that here; otherwise it becomes part of the name.
+                i = new_end
+                reference = pointer = array = False
+            elif token.name == ',':
+                AddType([])
+                reference = pointer = array = False
+            elif token.name == '*':
+                pointer = True
+            elif token.name == '&':
+                reference = True
+            elif token.name == '[':
+                pointer = True
+            elif token.name == ']':
+                pass
+            else:
+                name_tokens.append(token)
+            i += 1
+
+        if name_tokens:
+            # No '<' in the tokens, just a simple name and no template.
+            AddType([])
+        return result
+
+    def DeclarationToParts(self, parts, needs_name_removed):
+        name = None
+        default = []
+        if needs_name_removed:
+            # Handle default (initial) values properly.
+            for i, t in enumerate(parts):
+                if t.name == '=':
+                    default = parts[i+1:]
+                    name = parts[i-1].name
+                    if name == ']' and parts[i-2].name == '[':
+                        name = parts[i-3].name
+                        i -= 1
+                    parts = parts[:i-1]
+                    break
+            else:
+                if parts[-1].token_type == tokenize.NAME:
+                    name = parts.pop().name
+                else:
+                    # TODO(nnorwitz): this is a hack that happens for code like
+                    # Register(Foo<T>); where it thinks this is a function call
+                    # but it's actually a declaration.
+                    name = '???'
+        modifiers = []
+        type_name = []
+        other_tokens = []
+        templated_types = []
+        i = 0
+        end = len(parts)
+        while i < end:
+            p = parts[i]
+            if keywords.IsKeyword(p.name):
+                modifiers.append(p.name)
+            elif p.name == '<':
+                templated_tokens, new_end = self._GetTemplateEnd(parts, i+1)
+                templated_types = self.ToType(templated_tokens)
+                i = new_end - 1
+                # Don't add a spurious :: to data members being initialized.
+                next_index = i + 1
+                if next_index < end and parts[next_index].name == '::':
+                    i += 1
+            elif p.name in ('[', ']', '='):
+                # These are handled elsewhere.
+                other_tokens.append(p)
+            elif p.name not in ('*', '&', '>'):
+                # Ensure that names have a space between them.
+                if (type_name and type_name[-1].token_type == tokenize.NAME and
+                    p.token_type == tokenize.NAME):
+                    type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
+                type_name.append(p)
+            else:
+                other_tokens.append(p)
+            i += 1
+        type_name = ''.join([t.name for t in type_name])
+        return name, type_name, templated_types, modifiers, default, other_tokens
+
+    def ToParameters(self, tokens):
+        if not tokens:
+            return []
+
+        result = []
+        name = type_name = ''
+        type_modifiers = []
+        pointer = reference = array = False
+        first_token = None
+        default = []
+
+        def AddParameter(end):
+            if default:
+                del default[0]  # Remove flag.
+            parts = self.DeclarationToParts(type_modifiers, True)
+            (name, type_name, templated_types, modifiers,
+             unused_default, unused_other_tokens) = parts
+            parameter_type = Type(first_token.start, first_token.end,
+                                  type_name, templated_types, modifiers,
+                                  reference, pointer, array)
+            p = Parameter(first_token.start, end, name,
+                          parameter_type, default)
+            result.append(p)
+
+        template_count = 0
+        for s in tokens:
+            if not first_token:
+                first_token = s
+            if s.name == '<':
+                template_count += 1
+            elif s.name == '>':
+                template_count -= 1
+            if template_count > 0:
+                type_modifiers.append(s)
+                continue
+
+            if s.name == ',':
+                AddParameter(s.start)
+                name = type_name = ''
+                type_modifiers = []
+                pointer = reference = array = False
+                first_token = None
+                default = []
+            elif s.name == '*':
+                pointer = True
+            elif s.name == '&':
+                reference = True
+            elif s.name == '[':
+                array = True
+            elif s.name == ']':
+                pass  # Just don't add to type_modifiers.
+            elif s.name == '=':
+                # Got a default value.  Add any value (None) as a flag.
+                default.append(None)
+            elif default:
+                default.append(s)
+            else:
+                type_modifiers.append(s)
+        AddParameter(tokens[-1].end)
+        return result
+
+    def CreateReturnType(self, return_type_seq):
+        if not return_type_seq:
+            return None
+        start = return_type_seq[0].start
+        end = return_type_seq[-1].end
+        _, name, templated_types, modifiers, default, other_tokens = \
+           self.DeclarationToParts(return_type_seq, False)
+        names = [n.name for n in other_tokens]
+        reference = '&' in names
+        pointer = '*' in names
+        array = '[' in names
+        return Type(start, end, name, templated_types, modifiers,
+                    reference, pointer, array)
+
+    def GetTemplateIndices(self, names):
+        # names is a list of strings.
+        start = names.index('<')
+        end = len(names) - 1
+        while end > 0:
+            if names[end] == '>':
+                break
+            end -= 1
+        return start, end+1
+
+class AstBuilder(object):
+    def __init__(self, token_stream, filename, in_class='', visibility=None,
+                 namespace_stack=[]):
+        self.tokens = token_stream
+        self.filename = filename
+        # TODO(nnorwitz): use a better data structure (deque) for the queue.
+        # Switching directions of the "queue" improved perf by about 25%.
+        # Using a deque should be even better since we access from both sides.
+        self.token_queue = []
+        self.namespace_stack = namespace_stack[:]
+        self.in_class = in_class
+        if in_class is None:
+            self.in_class_name_only = None
+        else:
+            self.in_class_name_only = in_class.split('::')[-1]
+        self.visibility = visibility
+        self.in_function = False
+        self.current_token = None
+        # Keep the state whether we are currently handling a typedef or not.
+        self._handling_typedef = False
+
+        self.converter = TypeConverter(self.namespace_stack)
+
+    def HandleError(self, msg, token):
+        printable_queue = list(reversed(self.token_queue[-20:]))
+        sys.stderr.write('Got %s in %s @ %s %s\n' %
+                         (msg, self.filename, token, printable_queue))
+
+    def Generate(self):
+        while 1:
+            token = self._GetNextToken()
+            if not token:
+                break
+
+            # Get the next token.
+            self.current_token = token
+
+            # Dispatch on the next token type.
+            if token.token_type == _INTERNAL_TOKEN:
+                if token.name == _NAMESPACE_POP:
+                    self.namespace_stack.pop()
+                continue
+
+            try:
+                result = self._GenerateOne(token)
+                if result is not None:
+                    yield result
+            except:
+                self.HandleError('exception', token)
+                raise
+
+    def _CreateVariable(self, pos_token, name, type_name, type_modifiers,
+                        ref_pointer_name_seq, templated_types, value=None):
+        reference = '&' in ref_pointer_name_seq
+        pointer = '*' in ref_pointer_name_seq
+        array = '[' in ref_pointer_name_seq
+        var_type = Type(pos_token.start, pos_token.end, type_name,
+                        templated_types, type_modifiers,
+                        reference, pointer, array)
+        return VariableDeclaration(pos_token.start, pos_token.end,
+                                   name, var_type, value, self.namespace_stack)
+
+    def _GenerateOne(self, token):
+        if token.token_type == tokenize.NAME:
+            if (keywords.IsKeyword(token.name) and
+                not keywords.IsBuiltinType(token.name)):
+                method = getattr(self, 'handle_' + token.name)
+                return method()
+            elif token.name == self.in_class_name_only:
+                # The token name is the same as the class; it must be a ctor if
+                # there is a paren.  Otherwise, it's the return type.
+                # Peek ahead to get the next token to figure out which.
+                next = self._GetNextToken()
+                self._AddBackToken(next)
+                if next.token_type == tokenize.SYNTAX and next.name == '(':
+                    return self._GetMethod([token], FUNCTION_CTOR, None, True)
+                # Fall through--handle like any other method.
+
+            # Handle data or function declaration/definition.
+            syntax = tokenize.SYNTAX
+            temp_tokens, last_token = \
+                self._GetVarTokensUpTo(syntax, '(', ';', '{', '[')
+            temp_tokens.insert(0, token)
+            if last_token.name == '(':
+                # If there is an assignment before the paren,
+                # this is an expression, not a method.
+                expr = bool([e for e in temp_tokens if e.name == '='])
+                if expr:
+                    new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
+                    temp_tokens.append(last_token)
+                    temp_tokens.extend(new_temp)
+                    last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
+
+            if last_token.name == '[':
+                # Handle array; this isn't a method unless it's an operator.
+                # TODO(nnorwitz): keep the size somewhere.
+                # unused_size = self._GetTokensUpTo(tokenize.SYNTAX, ']')
+                temp_tokens.append(last_token)
+                if temp_tokens[-2].name == 'operator':
+                    temp_tokens.append(self._GetNextToken())
+                else:
+                    temp_tokens2, last_token = \
+                        self._GetVarTokensUpTo(tokenize.SYNTAX, ';')
+                    temp_tokens.extend(temp_tokens2)
+
+            if last_token.name == ';':
+                # Handle data; this isn't a method.
+                parts = self.converter.DeclarationToParts(temp_tokens, True)
+                (name, type_name, templated_types, modifiers, default,
+                 unused_other_tokens) = parts
+
+                t0 = temp_tokens[0]
+                names = [t.name for t in temp_tokens]
+                if templated_types:
+                    start, end = self.converter.GetTemplateIndices(names)
+                    names = names[:start] + names[end:]
+                default = ''.join([t.name for t in default])
+                return self._CreateVariable(t0, name, type_name, modifiers,
+                                            names, templated_types, default)
+            if last_token.name == '{':
+                self._AddBackTokens(temp_tokens[1:])
+                self._AddBackToken(last_token)
+                method_name = temp_tokens[0].name
+                method = getattr(self, 'handle_' + method_name, None)
+                if not method:
+                    # Must be declaring a variable.
+                    # TODO(nnorwitz): handle the declaration.
+                    return None
+                return method()
+            return self._GetMethod(temp_tokens, 0, None, False)
+        elif token.token_type == tokenize.SYNTAX:
+            if token.name == '~' and self.in_class:
+                # Must be a dtor (probably not in method body).
+                token = self._GetNextToken()
+                # self.in_class can contain A::Name, but the dtor will only
+                # be Name.  Make sure to compare against the right value.
+                if (token.token_type == tokenize.NAME and
+                    token.name == self.in_class_name_only):
+                    return self._GetMethod([token], FUNCTION_DTOR, None, True)
+            # TODO(nnorwitz): handle a lot more syntax.
+        elif token.token_type == tokenize.PREPROCESSOR:
+            # TODO(nnorwitz): handle more preprocessor directives.
+            # token starts with a #, so remove it and strip whitespace.
+            name = token.name[1:].lstrip()
+            if name.startswith('include'):
+                # Remove "include".
+                name = name[7:].strip()
+                assert name
+                # Handle #include \<newline> "header-on-second-line.h".
+                if name.startswith('\\'):
+                    name = name[1:].strip()
+                assert name[0] in '<"', token
+                assert name[-1] in '>"', token
+                system = name[0] == '<'
+                filename = name[1:-1]
+                return Include(token.start, token.end, filename, system)
+            if name.startswith('define'):
+                # Remove "define".
+                name = name[6:].strip()
+                assert name
+                value = ''
+                for i, c in enumerate(name):
+                    if c.isspace():
+                        value = name[i:].lstrip()
+                        name = name[:i]
+                        break
+                return Define(token.start, token.end, name, value)
+            if name.startswith('if') and name[2:3].isspace():
+                condition = name[3:].strip()
+                if condition.startswith('0') or condition.startswith('(0)'):
+                    self._SkipIf0Blocks()
+        return None
+
+    def _GetTokensUpTo(self, expected_token_type, expected_token):
+        return self._GetVarTokensUpTo(expected_token_type, expected_token)[0]
+
+    def _GetVarTokensUpTo(self, expected_token_type, *expected_tokens):
+        last_token = self._GetNextToken()
+        tokens = []
+        while (last_token.token_type != expected_token_type or
+               last_token.name not in expected_tokens):
+            tokens.append(last_token)
+            last_token = self._GetNextToken()
+        return tokens, last_token
+
+    # TODO(nnorwitz): remove _IgnoreUpTo(); it shouldn't be necessary.
+    def _IgnoreUpTo(self, token_type, token):
+        unused_tokens = self._GetTokensUpTo(token_type, token)
+
+    def _SkipIf0Blocks(self):
+        count = 1
+        while 1:
+            token = self._GetNextToken()
+            if token.token_type != tokenize.PREPROCESSOR:
+                continue
+
+            name = token.name[1:].lstrip()
+            if name.startswith('endif'):
+                count -= 1
+                if count == 0:
+                    break
+            elif name.startswith('if'):
+                count += 1
+
+    def _GetMatchingChar(self, open_paren, close_paren, GetNextToken=None):
+        if GetNextToken is None:
+            GetNextToken = self._GetNextToken
+        # Assumes the current token is open_paren and we will consume
+        # and return up to the close_paren.
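+        # Note that the closing token itself is yielded last as well;
+        # callers typically drop it (e.g. "del parameters[-1]").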
+        count = 1
+        token = GetNextToken()
+        while 1:
+            if token.token_type == tokenize.SYNTAX:
+                if token.name == open_paren:
+                    count += 1
+                elif token.name == close_paren:
+                    count -= 1
+                    if count == 0:
+                        break
+            yield token
+            token = GetNextToken()
+        yield token
+
+    def _GetParameters(self):
+        return self._GetMatchingChar('(', ')')
+
+    def GetScope(self):
+        return self._GetMatchingChar('{', '}')
+
+    def _GetNextToken(self):
+        if self.token_queue:
+            return self.token_queue.pop()
+        return next(self.tokens)
+
+    def _AddBackToken(self, token):
+        if token.whence == tokenize.WHENCE_STREAM:
+            token.whence = tokenize.WHENCE_QUEUE
+            self.token_queue.insert(0, token)
+        else:
+            assert token.whence == tokenize.WHENCE_QUEUE, token
+            self.token_queue.append(token)
+
+    def _AddBackTokens(self, tokens):
+        if tokens:
+            if tokens[-1].whence == tokenize.WHENCE_STREAM:
+                for token in tokens:
+                    token.whence = tokenize.WHENCE_QUEUE
+                self.token_queue[:0] = reversed(tokens)
+            else:
+                assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
+                self.token_queue.extend(reversed(tokens))
+
+    def GetName(self, seq=None):
+        """Returns ([tokens], next_token_info)."""
+        GetNextToken = self._GetNextToken
+        if seq is not None:
+            it = iter(seq)
+            GetNextToken = lambda: next(it)
+        next_token = GetNextToken()
+        tokens = []
+        last_token_was_name = False
+        while (next_token.token_type == tokenize.NAME or
+               (next_token.token_type == tokenize.SYNTAX and
+                next_token.name in ('::', '<'))):
+            # Two NAMEs in a row means the identifier should terminate.
+            # It's probably some sort of variable declaration.
+            if last_token_was_name and next_token.token_type == tokenize.NAME:
+                break
+            last_token_was_name = next_token.token_type == tokenize.NAME
+            tokens.append(next_token)
+            # Handle templated names.
+            if next_token.name == '<':
+                tokens.extend(self._GetMatchingChar('<', '>', GetNextToken))
+                last_token_was_name = True
+            next_token = GetNextToken()
+        return tokens, next_token
+
+    def GetMethod(self, modifiers, templated_types):
+        return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')
+        assert len(return_type_and_name) >= 1
+        return self._GetMethod(return_type_and_name, modifiers, templated_types,
+                               False)
+
+    def _GetMethod(self, return_type_and_name, modifiers, templated_types,
+                   get_paren):
+        template_portion = None
+        if get_paren:
+            token = self._GetNextToken()
+            assert token.token_type == tokenize.SYNTAX, token
+            if token.name == '<':
+                # Handle templatized dtors.
+                template_portion = [token]
+                template_portion.extend(self._GetMatchingChar('<', '>'))
+                token = self._GetNextToken()
+            assert token.token_type == tokenize.SYNTAX, token
+            assert token.name == '(', token
+
+        name = return_type_and_name.pop()
+        # Handle templatized ctors.
+        if name.name == '>':
+            index = 1
+            while return_type_and_name[index].name != '<':
+                index += 1
+            template_portion = return_type_and_name[index:] + [name]
+            del return_type_and_name[index:]
+            name = return_type_and_name.pop()
+        elif name.name == ']':
+            rt = return_type_and_name
+            assert rt[-1].name == '[', return_type_and_name
+            assert rt[-2].name == 'operator', return_type_and_name
+            name_seq = return_type_and_name[-2:]
+            del return_type_and_name[-2:]
+            name = tokenize.Token(tokenize.NAME, 'operator[]',
+                                  name_seq[0].start, name.end)
+            # Get the open paren so _GetParameters() below works.
+            unused_open_paren = self._GetNextToken()
+
+        # TODO(nnorwitz): store template_portion.
+        return_type = return_type_and_name
+        indices = name
+        if return_type:
+            indices = return_type[0]
+
+        # Force ctor for templatized ctors.
+        if name.name == self.in_class and not modifiers:
+            modifiers |= FUNCTION_CTOR
+        parameters = list(self._GetParameters())
+        del parameters[-1]              # Remove trailing ')'.
+
+        # Handling operator() is especially weird.
+        if name.name == 'operator' and not parameters:
+            token = self._GetNextToken()
+            assert token.name == '(', token
+            parameters = list(self._GetParameters())
+            del parameters[-1]          # Remove trailing ')'.
+
+        token = self._GetNextToken()
+        while token.token_type == tokenize.NAME:
+            modifier_token = token
+            token = self._GetNextToken()
+            if modifier_token.name == 'const':
+                modifiers |= FUNCTION_CONST
+            elif modifier_token.name == '__attribute__':
+                # TODO(nnorwitz): handle more __attribute__ details.
+                modifiers |= FUNCTION_ATTRIBUTE
+                assert token.name == '(', token
+                # Consume everything between the (parens).
+                unused_tokens = list(self._GetMatchingChar('(', ')'))
+                token = self._GetNextToken()
+            elif modifier_token.name == 'throw':
+                modifiers |= FUNCTION_THROW
+                assert token.name == '(', token
+                # Consume everything between the (parens).
+                unused_tokens = list(self._GetMatchingChar('(', ')'))
+                token = self._GetNextToken()
+            elif modifier_token.name == 'override':
+                modifiers |= FUNCTION_OVERRIDE
+            elif modifier_token.name == modifier_token.name.upper():
+                # HACK(nnorwitz):  assume that all upper-case names
+                # are some macro we aren't expanding.
+                modifiers |= FUNCTION_UNKNOWN_ANNOTATION
+            else:
+                self.HandleError('unexpected token', modifier_token)
+
+        assert token.token_type == tokenize.SYNTAX, token
+        # Handle ctor initializers.
+        if token.name == ':':
+            # TODO(nnorwitz): anything else to handle for initializer list?
+            while token.name != ';' and token.name != '{':
+                token = self._GetNextToken()
+
+        # Handle pointers to functions that are really data but look
+        # like method declarations.
+        if token.name == '(':
+            if parameters[0].name == '*':
+                # name contains the return type.
+                name = parameters.pop()
+                # parameters contains the name of the data.
+                modifiers = [p.name for p in parameters]
+                # Already at the ( to open the parameter list.
+                function_parameters = list(self._GetMatchingChar('(', ')'))
+                del function_parameters[-1]  # Remove trailing ')'.
+                # TODO(nnorwitz): store the function_parameters.
+                token = self._GetNextToken()
+                assert token.token_type == tokenize.SYNTAX, token
+                assert token.name == ';', token
+                return self._CreateVariable(indices, name.name, indices.name,
+                                            modifiers, '', None)
+            # At this point, we got something like:
+            #  return_type (type::*name_)(params);
+            # This is a data member called name_ that is a function pointer.
+            # With this code: void (sq_type::*field_)(string&);
+            # We get: name=void return_type=[] parameters=sq_type ... field_
+            # TODO(nnorwitz): is return_type always empty?
+            # TODO(nnorwitz): this isn't even close to being correct.
+            # Just put in something so we don't crash and can move on.
+            real_name = parameters[-1]
+            modifiers = [p.name for p in self._GetParameters()]
+            del modifiers[-1]           # Remove trailing ')'.
+            return self._CreateVariable(indices, real_name.name, indices.name,
+                                        modifiers, '', None)
+
+        if token.name == '{':
+            body = list(self.GetScope())
+            del body[-1]                # Remove trailing '}'.
+        else:
+            body = None
+            if token.name == '=':
+                token = self._GetNextToken()
+
+                if token.name == 'default' or token.name == 'delete':
+                    # Ignore explicitly defaulted and deleted special members
+                    # in C++11.
+                    token = self._GetNextToken()
+                else:
+                    # Handle pure-virtual declarations.
+                    assert token.token_type == tokenize.CONSTANT, token
+                    assert token.name == '0', token
+                    modifiers |= FUNCTION_PURE_VIRTUAL
+                    token = self._GetNextToken()
+
+            if token.name == '[':
+                # TODO(nnorwitz): store tokens and improve parsing.
+                # template <typename T, size_t N> char (&ASH(T (&seq)[N]))[N];
+                tokens = list(self._GetMatchingChar('[', ']'))
+                token = self._GetNextToken()
+
+            assert token.name == ';', (token, return_type_and_name, parameters)
+
+        # Looks like we got a method, not a function.
+        if len(return_type) > 2 and return_type[-1].name == '::':
+            return_type, in_class = \
+                         self._GetReturnTypeAndClassName(return_type)
+            return Method(indices.start, indices.end, name.name, in_class,
+                          return_type, parameters, modifiers, templated_types,
+                          body, self.namespace_stack)
+        return Function(indices.start, indices.end, name.name, return_type,
+                        parameters, modifiers, templated_types, body,
+                        self.namespace_stack)
+
+    def _GetReturnTypeAndClassName(self, token_seq):
+        # Splitting the return type from the class name in a method
+        # can be tricky.  For example, Return::Type::Is::Hard::To::Find().
+        # Where is the return type and where is the class name?
+        # The heuristic used is to pull the last name as the class name.
+        # This includes all the templated type info.
+        # TODO(nnorwitz): if there is only one name, as in the
+        # example above, punt and assume the last bit is the class name.
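+        # For example, for "void MyClass::Method()" the caller has already
+        # popped the method name, so token_seq holds the tokens for
+        # "void MyClass ::"; the 'void' token(s) make up the return type
+        # and 'MyClass' is the class name.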
+
+        # Ignore a :: prefix, if it exists, so we can find the first real name.
+        i = 0
+        if token_seq[0].name == '::':
+            i = 1
+        # Ignore a :: suffix, if it exists.
+        end = len(token_seq) - 1
+        if token_seq[end-1].name == '::':
+            end -= 1
+
+        # Make a copy of the sequence so we can append a sentinel
+        # value.  This is required because GetName needs some
+        # terminating condition beyond the last name.
+        seq_copy = token_seq[i:end]
+        seq_copy.append(tokenize.Token(tokenize.SYNTAX, '', 0, 0))
+        names = []
+        while i < end:
+            # Iterate through the sequence parsing out each name.
+            new_name, next = self.GetName(seq_copy[i:])
+            assert new_name, 'Got empty new_name, next=%s' % next
+            # We got a pointer or ref.  Add it to the name.
+            if next and next.token_type == tokenize.SYNTAX:
+                new_name.append(next)
+            names.append(new_name)
+            i += len(new_name)
+
+        # Now that we have the names, it's time to undo what we did.
+
+        # Remove the sentinel value.
+        names[-1].pop()
+        # Flatten the token sequence for the return type.
+        return_type = [e for seq in names[:-1] for e in seq]
+        # The class name is the last name.
+        class_name = names[-1]
+        return return_type, class_name
+
+    def handle_bool(self):
+        pass
+
+    def handle_char(self):
+        pass
+
+    def handle_int(self):
+        pass
+
+    def handle_long(self):
+        pass
+
+    def handle_short(self):
+        pass
+
+    def handle_double(self):
+        pass
+
+    def handle_float(self):
+        pass
+
+    def handle_void(self):
+        pass
+
+    def handle_wchar_t(self):
+        pass
+
+    def handle_unsigned(self):
+        pass
+
+    def handle_signed(self):
+        pass
+
+    def _GetNestedType(self, ctor):
+        name = None
+        name_tokens, token = self.GetName()
+        if name_tokens:
+            name = ''.join([t.name for t in name_tokens])
+
+        # Handle forward declarations.
+        if token.token_type == tokenize.SYNTAX and token.name == ';':
+            return ctor(token.start, token.end, name, None,
+                        self.namespace_stack)
+
+        if token.token_type == tokenize.NAME and self._handling_typedef:
+            self._AddBackToken(token)
+            return ctor(token.start, token.end, name, None,
+                        self.namespace_stack)
+
+        # Must be the type declaration.
+        fields = list(self._GetMatchingChar('{', '}'))
+        del fields[-1]                  # Remove trailing '}'.
+        if token.token_type == tokenize.SYNTAX and token.name == '{':
+            next = self._GetNextToken()
+            new_type = ctor(token.start, token.end, name, fields,
+                            self.namespace_stack)
+            # A name means this is an anonymous type and the name
+            # is the variable declaration.
+            if next.token_type != tokenize.NAME:
+                return new_type
+            name = new_type
+            token = next
+
+        # Must be a variable declaration using the type, prefixed with the keyword.
+        assert token.token_type == tokenize.NAME, token
+        return self._CreateVariable(token, token.name, name, [], '', None)
+
+    def handle_struct(self):
+        # Special-case the handling of typedefs/aliasing of structs here.
+        # It would be a pain to handle in the class code.
+        name_tokens, var_token = self.GetName()
+        if name_tokens:
+            next_token = self._GetNextToken()
+            is_syntax = (var_token.token_type == tokenize.SYNTAX and
+                         var_token.name[0] in '*&')
+            is_variable = (var_token.token_type == tokenize.NAME and
+                           next_token.name == ';')
+            variable = var_token
+            if is_syntax and not is_variable:
+                variable = next_token
+                temp = self._GetNextToken()
+                if temp.token_type == tokenize.SYNTAX and temp.name == '(':
+                    # Handle methods declared to return a struct.
+                    t0 = name_tokens[0]
+                    struct = tokenize.Token(tokenize.NAME, 'struct',
+                                            t0.start-7, t0.start-2)
+                    type_and_name = [struct]
+                    type_and_name.extend(name_tokens)
+                    type_and_name.extend((var_token, next_token))
+                    return self._GetMethod(type_and_name, 0, None, False)
+                assert temp.name == ';', (temp, name_tokens, var_token)
+            if is_syntax or (is_variable and not self._handling_typedef):
+                modifiers = ['struct']
+                type_name = ''.join([t.name for t in name_tokens])
+                position = name_tokens[0]
+                return self._CreateVariable(position, variable.name, type_name,
+                                            modifiers, var_token.name, None)
+            name_tokens.extend((var_token, next_token))
+            self._AddBackTokens(name_tokens)
+        else:
+            self._AddBackToken(var_token)
+        return self._GetClass(Struct, VISIBILITY_PUBLIC, None)
+
+    def handle_union(self):
+        return self._GetNestedType(Union)
+
+    def handle_enum(self):
+        return self._GetNestedType(Enum)
+
+    def handle_auto(self):
+        # TODO(nnorwitz): warn about using auto?  Probably not since it
+        # will be reclaimed and useful for C++0x.
+        pass
+
+    def handle_register(self):
+        pass
+
+    def handle_const(self):
+        pass
+
+    def handle_inline(self):
+        pass
+
+    def handle_extern(self):
+        pass
+
+    def handle_static(self):
+        pass
+
+    def handle_virtual(self):
+        # What follows must be a method.
+        token = token2 = self._GetNextToken()
+        if token.name == 'inline':
+            # HACK(nnorwitz): handle inline dtors by ignoring 'inline'.
+            token2 = self._GetNextToken()
+        if token2.token_type == tokenize.SYNTAX and token2.name == '~':
+            return self.GetMethod(FUNCTION_VIRTUAL + FUNCTION_DTOR, None)
+        assert token.token_type == tokenize.NAME or token.name == '::', token
+        return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')  # )
+        return_type_and_name.insert(0, token)
+        if token2 is not token:
+            return_type_and_name.insert(1, token2)
+        return self._GetMethod(return_type_and_name, FUNCTION_VIRTUAL,
+                               None, False)
+
+    def handle_volatile(self):
+        pass
+
+    def handle_mutable(self):
+        pass
+
+    def handle_public(self):
+        assert self.in_class
+        self.visibility = VISIBILITY_PUBLIC
+
+    def handle_protected(self):
+        assert self.in_class
+        self.visibility = VISIBILITY_PROTECTED
+
+    def handle_private(self):
+        assert self.in_class
+        self.visibility = VISIBILITY_PRIVATE
+
+    def handle_friend(self):
+        tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
+        assert tokens
+        t0 = tokens[0]
+        return Friend(t0.start, t0.end, tokens, self.namespace_stack)
+
+    def handle_static_cast(self):
+        pass
+
+    def handle_const_cast(self):
+        pass
+
+    def handle_dynamic_cast(self):
+        pass
+
+    def handle_reinterpret_cast(self):
+        pass
+
+    def handle_new(self):
+        pass
+
+    def handle_delete(self):
+        tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
+        assert tokens
+        return Delete(tokens[0].start, tokens[0].end, tokens)
+
+    def handle_typedef(self):
+        token = self._GetNextToken()
+        if (token.token_type == tokenize.NAME and
+            keywords.IsKeyword(token.name)):
+            # Token must be struct/enum/union/class.
+            method = getattr(self, 'handle_' + token.name)
+            self._handling_typedef = True
+            tokens = [method()]
+            self._handling_typedef = False
+        else:
+            tokens = [token]
+
+        # Get the remainder of the typedef up to the semi-colon.
+        tokens.extend(self._GetTokensUpTo(tokenize.SYNTAX, ';'))
+
+        # TODO(nnorwitz): clean all this up.
+        assert tokens
+        name = tokens.pop()
+        indices = name
+        if tokens:
+            indices = tokens[0]
+        if not indices:
+            indices = token
+        if name.name == ')':
+            # HACK(nnorwitz): Handle pointers to functions "properly".
+            if (len(tokens) >= 4 and
+                tokens[1].name == '(' and tokens[2].name == '*'):
+                tokens.append(name)
+                name = tokens[3]
+        elif name.name == ']':
+            # HACK(nnorwitz): Handle arrays properly.
+            if len(tokens) >= 2:
+                tokens.append(name)
+                name = tokens[1]
+        new_type = tokens
+        if tokens and isinstance(tokens[0], tokenize.Token):
+            new_type = self.converter.ToType(tokens)[0]
+        return Typedef(indices.start, indices.end, name.name,
+                       new_type, self.namespace_stack)
+
+    def handle_typeid(self):
+        pass  # Not needed yet.
+
+    def handle_typename(self):
+        pass  # Not needed yet.
+
+    def _GetTemplatedTypes(self):
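+        # Returns a dict mapping each template parameter name to a
+        # (type_name, default) pair; e.g. for
+        # "template <typename T, typename U = int>" there are entries
+        # for 'T' and 'U'.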
+        result = {}
+        tokens = list(self._GetMatchingChar('<', '>'))
+        len_tokens = len(tokens) - 1    # Ignore trailing '>'.
+        i = 0
+        while i < len_tokens:
+            key = tokens[i].name
+            i += 1
+            if keywords.IsKeyword(key) or key == ',':
+                continue
+            type_name = default = None
+            if i < len_tokens:
+                i += 1
+                if tokens[i-1].name == '=':
+                    assert i < len_tokens, '%s %s' % (i, tokens)
+                    default, unused_next_token = self.GetName(tokens[i:])
+                    i += len(default)
+                else:
+                    if tokens[i-1].name != ',':
+                        # We got something like: Type variable.
+                        # Re-adjust the key (variable) and type_name (Type).
+                        key = tokens[i-1].name
+                        type_name = tokens[i-2]
+
+            result[key] = (type_name, default)
+        return result
+
+    def handle_template(self):
+        token = self._GetNextToken()
+        assert token.token_type == tokenize.SYNTAX, token
+        assert token.name == '<', token
+        templated_types = self._GetTemplatedTypes()
+        # TODO(nnorwitz): for now, just ignore the template params.
+        token = self._GetNextToken()
+        if token.token_type == tokenize.NAME:
+            if token.name == 'class':
+                return self._GetClass(Class, VISIBILITY_PRIVATE, templated_types)
+            elif token.name == 'struct':
+                return self._GetClass(Struct, VISIBILITY_PUBLIC, templated_types)
+            elif token.name == 'friend':
+                return self.handle_friend()
+        self._AddBackToken(token)
+        tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';')
+        tokens.append(last)
+        self._AddBackTokens(tokens)
+        if last.name == '(':
+            return self.GetMethod(FUNCTION_NONE, templated_types)
+        # Must be a variable definition.
+        return None
+
+    def handle_true(self):
+        pass  # Nothing to do.
+
+    def handle_false(self):
+        pass  # Nothing to do.
+
+    def handle_asm(self):
+        pass  # Not needed yet.
+
+    def handle_class(self):
+        return self._GetClass(Class, VISIBILITY_PRIVATE, None)
+
+    def _GetBases(self):
+        # Get base classes.
+        bases = []
+        while 1:
+            token = self._GetNextToken()
+            assert token.token_type == tokenize.NAME, token
+            # TODO(nnorwitz): store kind of inheritance...maybe.
+            if token.name not in ('public', 'protected', 'private'):
+                # If the inheritance type is not specified, it defaults
+                # to private for classes (and public for structs).
+                # Just put the token back so we can form a name.
+                # TODO(nnorwitz): it would be good to warn about this.
+                self._AddBackToken(token)
+            else:
+                # Check for virtual inheritance.
+                token = self._GetNextToken()
+                if token.name != 'virtual':
+                    self._AddBackToken(token)
+                else:
+                    # TODO(nnorwitz): store that we got virtual for this base.
+                    pass
+            base, next_token = self.GetName()
+            bases_ast = self.converter.ToType(base)
+            assert len(bases_ast) == 1, bases_ast
+            bases.append(bases_ast[0])
+            assert next_token.token_type == tokenize.SYNTAX, next_token
+            if next_token.name == '{':
+                token = next_token
+                break
+            # Support multiple inheritance.
+            assert next_token.name == ',', next_token
+        return bases, token
+
+    def _GetClass(self, class_type, visibility, templated_types):
+        class_name = None
+        class_token = self._GetNextToken()
+        if class_token.token_type != tokenize.NAME:
+            assert class_token.token_type == tokenize.SYNTAX, class_token
+            token = class_token
+        else:
+            # Skip any macro (e.g. storage class specifiers) after the
+            # 'class' keyword.
+            next_token = self._GetNextToken()
+            if next_token.token_type == tokenize.NAME:
+                self._AddBackToken(next_token)
+            else:
+                self._AddBackTokens([class_token, next_token])
+            name_tokens, token = self.GetName()
+            class_name = ''.join([t.name for t in name_tokens])
+        bases = None
+        if token.token_type == tokenize.SYNTAX:
+            if token.name == ';':
+                # Forward declaration.
+                return class_type(class_token.start, class_token.end,
+                                  class_name, None, templated_types, None,
+                                  self.namespace_stack)
+            if token.name in '*&':
+                # Inline forward declaration.  Could be method or data.
+                name_token = self._GetNextToken()
+                next_token = self._GetNextToken()
+                if next_token.name == ';':
+                    # Handle data
+                    modifiers = ['class']
+                    return self._CreateVariable(class_token, name_token.name,
+                                                class_name,
+                                                modifiers, token.name, None)
+                else:
+                    # Assume this is a method.
+                    tokens = (class_token, token, name_token, next_token)
+                    self._AddBackTokens(tokens)
+                    return self.GetMethod(FUNCTION_NONE, None)
+            if token.name == ':':
+                bases, token = self._GetBases()
+
+        body = None
+        if token.token_type == tokenize.SYNTAX and token.name == '{':
+            assert token.token_type == tokenize.SYNTAX, token
+            assert token.name == '{', token
+
+            ast = AstBuilder(self.GetScope(), self.filename, class_name,
+                             visibility, self.namespace_stack)
+            body = list(ast.Generate())
+
+            if not self._handling_typedef:
+                token = self._GetNextToken()
+                if token.token_type != tokenize.NAME:
+                    assert token.token_type == tokenize.SYNTAX, token
+                    assert token.name == ';', token
+                else:
+                    new_class = class_type(class_token.start, class_token.end,
+                                           class_name, bases, None,
+                                           body, self.namespace_stack)
+
+                    modifiers = []
+                    return self._CreateVariable(class_token,
+                                                token.name, new_class,
+                                                modifiers, token.name, None)
+        else:
+            if not self._handling_typedef:
+                self.HandleError('non-typedef token', token)
+            self._AddBackToken(token)
+
+        return class_type(class_token.start, class_token.end, class_name,
+                          bases, templated_types, body, self.namespace_stack)
+
+    def handle_namespace(self):
+        token = self._GetNextToken()
+        # Support anonymous namespaces.
+        name = None
+        if token.token_type == tokenize.NAME:
+            name = token.name
+            token = self._GetNextToken()
+        self.namespace_stack.append(name)
+        assert token.token_type == tokenize.SYNTAX, token
+        # Create an internal token that denotes when the namespace is complete.
+        internal_token = tokenize.Token(_INTERNAL_TOKEN, _NAMESPACE_POP,
+                                        None, None)
+        internal_token.whence = token.whence
+        if token.name == '=':
+            # TODO(nnorwitz): handle aliasing namespaces.
+            name, next_token = self.GetName()
+            assert next_token.name == ';', next_token
+            self._AddBackToken(internal_token)
+        else:
+            assert token.name == '{', token
+            tokens = list(self.GetScope())
+            # Replace the trailing } with the internal namespace pop token.
+            tokens[-1] = internal_token
+            # Handle namespace with nothing in it.
+            self._AddBackTokens(tokens)
+        return None
+
+    def handle_using(self):
+        tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
+        assert tokens
+        return Using(tokens[0].start, tokens[0].end, tokens)
+
+    def handle_explicit(self):
+        assert self.in_class
+        # Nothing much to do.
+        # TODO(nnorwitz): maybe verify the method name == class name.
+        # This must be a ctor.
+        return self.GetMethod(FUNCTION_CTOR, None)
+
+    def handle_this(self):
+        pass  # Nothing to do.
+
+    def handle_operator(self):
+        # Pull off the next token(s?) and make that part of the method name.
+        pass
+
+    def handle_sizeof(self):
+        pass
+
+    def handle_case(self):
+        pass
+
+    def handle_switch(self):
+        pass
+
+    def handle_default(self):
+        token = self._GetNextToken()
+        assert token.token_type == tokenize.SYNTAX
+        assert token.name == ':'
+
+    def handle_if(self):
+        pass
+
+    def handle_else(self):
+        pass
+
+    def handle_return(self):
+        tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
+        if not tokens:
+            return Return(self.current_token.start, self.current_token.end, None)
+        return Return(tokens[0].start, tokens[0].end, tokens)
+
+    def handle_goto(self):
+        tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
+        assert len(tokens) == 1, str(tokens)
+        return Goto(tokens[0].start, tokens[0].end, tokens[0].name)
+
+    def handle_try(self):
+        pass  # Not needed yet.
+
+    def handle_catch(self):
+        pass  # Not needed yet.
+
+    def handle_throw(self):
+        pass  # Not needed yet.
+
+    def handle_while(self):
+        pass
+
+    def handle_do(self):
+        pass
+
+    def handle_for(self):
+        pass
+
+    def handle_break(self):
+        self._IgnoreUpTo(tokenize.SYNTAX, ';')
+
+    def handle_continue(self):
+        self._IgnoreUpTo(tokenize.SYNTAX, ';')
+
+
+def BuilderFromSource(source, filename):
+    """Utility method that returns an AstBuilder from source code.
+
+    Args:
+      source: 'C++ source code'
+      filename: 'file1'
+
+    Returns:
+      AstBuilder
+    """
+    return AstBuilder(tokenize.GetTokens(source), filename)
+
+
+def PrintIndentifiers(filename, should_print):
+    """Prints all identifiers for a C++ source file.
+
+    Args:
+      filename: 'file1'
+      should_print: predicate with signature: bool Function(token)
+    """
+    source = utils.ReadFile(filename, False)
+    if source is None:
+        sys.stderr.write('Unable to find: %s\n' % filename)
+        return
+
+    #print('Processing %s' % actual_filename)
+    builder = BuilderFromSource(source, filename)
+    try:
+        for node in builder.Generate():
+            if should_print(node):
+                print(node.name)
+    except KeyboardInterrupt:
+        return
+    except:
+        pass
+
+
+def PrintAllIndentifiers(filenames, should_print):
+    """Prints all identifiers for each C++ source file in filenames.
+
+    Args:
+      filenames: ['file1', 'file2', ...]
+      should_print: predicate with signature: bool Function(token)
+    """
+    for path in filenames:
+        PrintIndentifiers(path, should_print)
+
+
+def main(argv):
+    for filename in argv[1:]:
+        source = utils.ReadFile(filename)
+        if source is None:
+            continue
+
+        print('Processing %s' % filename)
+        builder = BuilderFromSource(source, filename)
+        try:
+            entire_ast = filter(None, builder.Generate())
+        except KeyboardInterrupt:
+            return
+        except:
+            # Already printed a warning; print the traceback and continue.
+            traceback.print_exc()
+        else:
+            if utils.DEBUG:
+                for ast in entire_ast:
+                    print(ast)
+
+
+if __name__ == '__main__':
+    main(sys.argv)
diff --git a/googlemock/scripts/generator/cpp/gmock_class.py b/googlemock/scripts/generator/cpp/gmock_class.py
new file mode 100755
index 0000000..f9966cb
--- /dev/null
+++ b/googlemock/scripts/generator/cpp/gmock_class.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python
+#
+# Copyright 2008 Google Inc.  All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generate Google Mock classes from base classes.
+
+This program will read in a C++ source file and output the Google Mock
+classes for the specified classes.  If no class is specified, all
+classes in the source file are emitted.
+
+Usage:
+  gmock_class.py header-file.h [ClassName]...
+
+Output is sent to stdout.
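+
+Example (file names are illustrative):
+  gmock_class.py my_class.h MyClass > mock_my_class.h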
+"""
+
+__author__ = 'nnorwitz@google.com (Neal Norwitz)'
+
+
+import os
+import re
+import sys
+
+from cpp import ast
+from cpp import utils
+
+# Preserve compatibility with Python 2.3.
+try:
+  _dummy = set
+except NameError:
+  import sets
+  set = sets.Set
+
+_VERSION = (1, 0, 1)  # The version of this script.
+# How many spaces to indent.  Can be set with the INDENT environment variable.
+_INDENT = 2
+
+
+def _GenerateMethods(output_lines, source, class_node):
+  function_type = (ast.FUNCTION_VIRTUAL | ast.FUNCTION_PURE_VIRTUAL |
+                   ast.FUNCTION_OVERRIDE)
+  ctor_or_dtor = ast.FUNCTION_CTOR | ast.FUNCTION_DTOR
+  indent = ' ' * _INDENT
+
+  for node in class_node.body:
+    # We only care about virtual functions.
+    if (isinstance(node, ast.Function) and
+        node.modifiers & function_type and
+        not node.modifiers & ctor_or_dtor):
+      # Pick out all the elements we need from the original function.
+      const = ''
+      if node.modifiers & ast.FUNCTION_CONST:
+        const = 'CONST_'
+      return_type = 'void'
+      if node.return_type:
+        # Add modifiers like 'const'.
+        modifiers = ''
+        if node.return_type.modifiers:
+          modifiers = ' '.join(node.return_type.modifiers) + ' '
+        return_type = modifiers + node.return_type.name
+        template_args = [arg.name for arg in node.return_type.templated_types]
+        if template_args:
+          return_type += '<' + ', '.join(template_args) + '>'
+          if len(template_args) > 1:
+            for line in [
+                '// The following line won\'t really compile, as the return',
+                '// type has multiple template arguments.  To fix it, use a',
+                '// typedef for the return type.']:
+              output_lines.append(indent + line)
+        if node.return_type.pointer:
+          return_type += '*'
+        if node.return_type.reference:
+          return_type += '&'
+      num_parameters = len(node.parameters)
+      if len(node.parameters) == 1:
+        first_param = node.parameters[0]
+        if source[first_param.start:first_param.end].strip() == 'void':
+          # We must treat T(void) as a function with no parameters.
+          num_parameters = 0
+      tmpl = ''
+      if class_node.templated_types:
+        tmpl = '_T'
+      mock_method_macro = 'MOCK_%sMETHOD%d%s' % (const, num_parameters, tmpl)
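+      # e.g. 'MOCK_METHOD1', 'MOCK_CONST_METHOD2', or 'MOCK_METHOD0_T' for a
+      # templated class.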
+
+      args = ''
+      if node.parameters:
+        # Due to parser limitations, it is impossible to keep comments
+        # while stripping the default parameters.  When defaults are
+        # present, we choose to strip both the defaults and the comments
+        # (and produce compilable code).
+        # TODO(nnorwitz@google.com): Investigate whether it is possible to
+        # preserve parameter name when reconstructing parameter text from
+        # the AST.
+        if len([param for param in node.parameters if param.default]) > 0:
+          args = ', '.join(param.type.name for param in node.parameters)
+        else:
+          # Get the full text of the parameters from the start
+          # of the first parameter to the end of the last parameter.
+          start = node.parameters[0].start
+          end = node.parameters[-1].end
+          # Remove // comments.
+          args_strings = re.sub(r'//.*', '', source[start:end])
+          # Condense multiple spaces and eliminate newlines, putting the
+          # parameters together on a single line.  Ensure there is a
+          # space in an argument that is split across a newline without
+          # intervening whitespace, e.g.: int\nBar
+          args = re.sub('  +', ' ', args_strings.replace('\n', ' '))
+
+      # Create the mock method definition.
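+      # For example, a const method taking a bool produces:
+      #   MOCK_CONST_METHOD1(Bar,
+      #       void(bool flag));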
+      output_lines.extend(['%s%s(%s,' % (indent, mock_method_macro, node.name),
+                           '%s%s(%s));' % (indent*3, return_type, args)])
+
+
+def _GenerateMocks(filename, source, ast_list, desired_class_names):
+  processed_class_names = set()
+  lines = []
+  for node in ast_list:
+    if (isinstance(node, ast.Class) and node.body and
+        # desired_class_names being None means that all classes are selected.
+        (not desired_class_names or node.name in desired_class_names)):
+      class_name = node.name
+      parent_name = class_name
+      processed_class_names.add(class_name)
+      class_node = node
+      # Add namespace before the class.
+      if class_node.namespace:
+        lines.extend(['namespace %s {' % n for n in class_node.namespace])  # }
+        lines.append('')
+
+      # Add template args for templated classes.
+      if class_node.templated_types:
+        # TODO(paulchang): The AST doesn't preserve template argument order,
+        # so we have to make up names here.
+        # TODO(paulchang): Handle non-type template arguments (e.g.
+        # template<typename T, int N>).
+        template_arg_count = len(class_node.templated_types.keys())
+        template_args = ['T%d' % n for n in range(template_arg_count)]
+        template_decls = ['typename ' + arg for arg in template_args]
+        lines.append('template <' + ', '.join(template_decls) + '>')
+        parent_name += '<' + ', '.join(template_args) + '>'
+
+      # Add the class prolog.
+      lines.append('class Mock%s : public %s {'  # }
+                   % (class_name, parent_name))
+      lines.append('%spublic:' % (' ' * (_INDENT // 2)))
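+      # The prolog above expands to, e.g., "class MockTest : public Test {"
+      # or "class MockTest : public Test<T0, T1> {" for a templated class.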
+
+      # Add all the methods.
+      _GenerateMethods(lines, source, class_node)
+
+      # Close the class.
+      if lines:
+        # If there are no virtual methods, no need for a public label.
+        if len(lines) == 2:
+          del lines[-1]
+
+        # Only close the class if there really is a class.
+        lines.append('};')
+        lines.append('')  # Add an extra newline.
+
+      # Close the namespace.
+      if class_node.namespace:
+        for i in range(len(class_node.namespace)-1, -1, -1):
+          lines.append('}  // namespace %s' % class_node.namespace[i])
+        lines.append('')  # Add an extra newline.
+
+  if desired_class_names:
+    missing_class_name_list = list(desired_class_names - processed_class_names)
+    if missing_class_name_list:
+      missing_class_name_list.sort()
+      sys.stderr.write('Class(es) not found in %s: %s\n' %
+                       (filename, ', '.join(missing_class_name_list)))
+  elif not processed_class_names:
+    sys.stderr.write('No class found in %s\n' % filename)
+
+  return lines
+
+
+def main(argv=sys.argv):
+  if len(argv) < 2:
+    sys.stderr.write('Google Mock Class Generator v%s\n\n' %
+                     '.'.join(map(str, _VERSION)))
+    sys.stderr.write(__doc__)
+    return 1
+
+  global _INDENT
+  try:
+    _INDENT = int(os.environ['INDENT'])
+  except KeyError:
+    pass
+  except:
+    sys.stderr.write('Unable to use indent of %s\n' % os.environ.get('INDENT'))
+
+  filename = argv[1]
+  desired_class_names = None  # None means all classes in the source file.
+  if len(argv) >= 3:
+    desired_class_names = set(argv[2:])
+  source = utils.ReadFile(filename)
+  if source is None:
+    return 1
+
+  builder = ast.BuilderFromSource(source, filename)
+  try:
+    entire_ast = filter(None, builder.Generate())
+  except KeyboardInterrupt:
+    return
+  except:
+    # An error message was already printed since we couldn't parse.
+    sys.exit(1)
+  else:
+    lines = _GenerateMocks(filename, source, entire_ast, desired_class_names)
+    sys.stdout.write('\n'.join(lines))
+
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/googlemock/scripts/generator/cpp/gmock_class_test.py b/googlemock/scripts/generator/cpp/gmock_class_test.py
new file mode 100755
index 0000000..018f90a
--- /dev/null
+++ b/googlemock/scripts/generator/cpp/gmock_class_test.py
@@ -0,0 +1,448 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Neal Norwitz All Rights Reserved.
+# Portions Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for gmock.scripts.generator.cpp.gmock_class."""
+
+__author__ = 'nnorwitz@google.com (Neal Norwitz)'
+
+
+import os
+import sys
+import unittest
+
+# Allow the cpp imports below to work when run as a standalone script.
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+
+from cpp import ast
+from cpp import gmock_class
+
+
+class TestCase(unittest.TestCase):
+  """Helper class that adds assert methods."""
+
+  def StripLeadingWhitespace(self, lines):
+    """Strip leading whitespace in each line in 'lines'."""
+    return '\n'.join([s.lstrip() for s in lines.split('\n')])
+
+  def assertEqualIgnoreLeadingWhitespace(self, expected_lines, lines):
+    """Specialized assert that ignores the indent level."""
+    self.assertEqual(expected_lines, self.StripLeadingWhitespace(lines))
+
+
+class GenerateMethodsTest(TestCase):
+
+  def GenerateMethodSource(self, cpp_source):
+    """Convert C++ source to Google Mock output source lines."""
+    method_source_lines = []
+    # <test> is a pseudo-filename, it is not read or written.
+    builder = ast.BuilderFromSource(cpp_source, '<test>')
+    ast_list = list(builder.Generate())
+    gmock_class._GenerateMethods(method_source_lines, cpp_source, ast_list[0])
+    return '\n'.join(method_source_lines)
+
+  def testSimpleMethod(self):
+    source = """
+class Foo {
+ public:
+  virtual int Bar();
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD0(Bar,\nint());',
+        self.GenerateMethodSource(source))
+
+  def testSimpleConstructorsAndDestructor(self):
+    source = """
+class Foo {
+ public:
+  Foo();
+  Foo(int x);
+  Foo(const Foo& f);
+  Foo(Foo&& f);
+  ~Foo();
+  virtual int Bar() = 0;
+};
+"""
+    # The constructors and destructor should be ignored.
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD0(Bar,\nint());',
+        self.GenerateMethodSource(source))
+
+  def testVirtualDestructor(self):
+    source = """
+class Foo {
+ public:
+  virtual ~Foo();
+  virtual int Bar() = 0;
+};
+"""
+    # The destructor should be ignored.
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD0(Bar,\nint());',
+        self.GenerateMethodSource(source))
+
+  def testExplicitlyDefaultedConstructorsAndDestructor(self):
+    source = """
+class Foo {
+ public:
+  Foo() = default;
+  Foo(const Foo& f) = default;
+  Foo(Foo&& f) = default;
+  ~Foo() = default;
+  virtual int Bar() = 0;
+};
+"""
+    # The constructors and destructor should be ignored.
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD0(Bar,\nint());',
+        self.GenerateMethodSource(source))
+
+  def testExplicitlyDeletedConstructorsAndDestructor(self):
+    source = """
+class Foo {
+ public:
+  Foo() = delete;
+  Foo(const Foo& f) = delete;
+  Foo(Foo&& f) = delete;
+  ~Foo() = delete;
+  virtual int Bar() = 0;
+};
+"""
+    # The constructors and destructor should be ignored.
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD0(Bar,\nint());',
+        self.GenerateMethodSource(source))
+
+  def testSimpleOverrideMethod(self):
+    source = """
+class Foo {
+ public:
+  int Bar() override;
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD0(Bar,\nint());',
+        self.GenerateMethodSource(source))
+
+  def testSimpleConstMethod(self):
+    source = """
+class Foo {
+ public:
+  virtual void Bar(bool flag) const;
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_CONST_METHOD1(Bar,\nvoid(bool flag));',
+        self.GenerateMethodSource(source))
+
+  def testExplicitVoid(self):
+    source = """
+class Foo {
+ public:
+  virtual int Bar(void);
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD0(Bar,\nint(void));',
+        self.GenerateMethodSource(source))
+
+  def testStrangeNewlineInParameter(self):
+    source = """
+class Foo {
+ public:
+  virtual void Bar(int
+a) = 0;
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD1(Bar,\nvoid(int a));',
+        self.GenerateMethodSource(source))
+
+  def testDefaultParameters(self):
+    source = """
+class Foo {
+ public:
+  virtual void Bar(int a, char c = 'x') = 0;
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD2(Bar,\nvoid(int, char));',
+        self.GenerateMethodSource(source))
+
+  def testMultipleDefaultParameters(self):
+    source = """
+class Foo {
+ public:
+  virtual void Bar(int a = 42, char c = 'x') = 0;
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD2(Bar,\nvoid(int, char));',
+        self.GenerateMethodSource(source))
+
+  def testRemovesCommentsWhenDefaultsArePresent(self):
+    source = """
+class Foo {
+ public:
+  virtual void Bar(int a = 42 /* a comment */,
+                   char /* other comment */ c= 'x') = 0;
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD2(Bar,\nvoid(int, char));',
+        self.GenerateMethodSource(source))
+
+  def testDoubleSlashCommentsInParameterListAreRemoved(self):
+    source = """
+class Foo {
+ public:
+  virtual void Bar(int a,  // inline comments should be elided.
+                   int b   // inline comments should be elided.
+                   ) const = 0;
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_CONST_METHOD2(Bar,\nvoid(int a, int b));',
+        self.GenerateMethodSource(source))
+
+  def testCStyleCommentsInParameterListAreNotRemoved(self):
+    # NOTE(nnorwitz): I'm not sure if it's the best behavior to keep these
+    # comments.  Also note that C style comments after the last parameter
+    # are still elided.
+    source = """
+class Foo {
+ public:
+  virtual const string& Bar(int /* keeper */, int b);
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD2(Bar,\nconst string&(int /* keeper */, int b));',
+        self.GenerateMethodSource(source))
+
+  def testArgsOfTemplateTypes(self):
+    source = """
+class Foo {
+ public:
+  virtual int Bar(const vector<int>& v, map<int, string>* output);
+};"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD2(Bar,\n'
+        'int(const vector<int>& v, map<int, string>* output));',
+        self.GenerateMethodSource(source))
+
+  def testReturnTypeWithOneTemplateArg(self):
+    source = """
+class Foo {
+ public:
+  virtual vector<int>* Bar(int n);
+};"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD1(Bar,\nvector<int>*(int n));',
+        self.GenerateMethodSource(source))
+
+  def testReturnTypeWithManyTemplateArgs(self):
+    source = """
+class Foo {
+ public:
+  virtual map<int, string> Bar();
+};"""
+    # Comparing the comment text is brittle - we'll think of something
+    # better in case this gets annoying, but for now let's keep it simple.
+    self.assertEqualIgnoreLeadingWhitespace(
+        '// The following line won\'t really compile, as the return\n'
+        '// type has multiple template arguments.  To fix it, use a\n'
+        '// typedef for the return type.\n'
+        'MOCK_METHOD0(Bar,\nmap<int, string>());',
+        self.GenerateMethodSource(source))
+
+  def testSimpleMethodInTemplatedClass(self):
+    source = """
+template<class T>
+class Foo {
+ public:
+  virtual int Bar();
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD0_T(Bar,\nint());',
+        self.GenerateMethodSource(source))
+
+  def testPointerArgWithoutNames(self):
+    source = """
+class Foo {
+  virtual int Bar(C*);
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD1(Bar,\nint(C*));',
+        self.GenerateMethodSource(source))
+
+  def testReferenceArgWithoutNames(self):
+    source = """
+class Foo {
+  virtual int Bar(C&);
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD1(Bar,\nint(C&));',
+        self.GenerateMethodSource(source))
+
+  def testArrayArgWithoutNames(self):
+    source = """
+class Foo {
+  virtual int Bar(C[]);
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        'MOCK_METHOD1(Bar,\nint(C[]));',
+        self.GenerateMethodSource(source))
+
+
+class GenerateMocksTest(TestCase):
+
+  def GenerateMocks(self, cpp_source):
+    """Convert C++ source to complete Google Mock output source."""
+    # <test> is a pseudo-filename, it is not read or written.
+    filename = '<test>'
+    builder = ast.BuilderFromSource(cpp_source, filename)
+    ast_list = list(builder.Generate())
+    lines = gmock_class._GenerateMocks(filename, cpp_source, ast_list, None)
+    return '\n'.join(lines)
+
+  def testNamespaces(self):
+    source = """
+namespace Foo {
+namespace Bar { class Forward; }
+namespace Baz {
+
+class Test {
+ public:
+  virtual void Foo();
+};
+
+}  // namespace Baz
+}  // namespace Foo
+"""
+    expected = """\
+namespace Foo {
+namespace Baz {
+
+class MockTest : public Test {
+public:
+MOCK_METHOD0(Foo,
+void());
+};
+
+}  // namespace Baz
+}  // namespace Foo
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        expected, self.GenerateMocks(source))
+
+  def testClassWithStorageSpecifierMacro(self):
+    source = """
+class STORAGE_SPECIFIER Test {
+ public:
+  virtual void Foo();
+};
+"""
+    expected = """\
+class MockTest : public Test {
+public:
+MOCK_METHOD0(Foo,
+void());
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        expected, self.GenerateMocks(source))
+
+  def testTemplatedForwardDeclaration(self):
+    source = """
+template <class T> class Forward;  // Forward declaration should be ignored.
+class Test {
+ public:
+  virtual void Foo();
+};
+"""
+    expected = """\
+class MockTest : public Test {
+public:
+MOCK_METHOD0(Foo,
+void());
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        expected, self.GenerateMocks(source))
+
+  def testTemplatedClass(self):
+    source = """
+template <typename S, typename T>
+class Test {
+ public:
+  virtual void Foo();
+};
+"""
+    expected = """\
+template <typename T0, typename T1>
+class MockTest : public Test<T0, T1> {
+public:
+MOCK_METHOD0_T(Foo,
+void());
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        expected, self.GenerateMocks(source))
+
+  def testTemplateInATemplateTypedef(self):
+    source = """
+class Test {
+ public:
+  typedef std::vector<std::list<int>> FooType;
+  virtual void Bar(const FooType& test_arg);
+};
+"""
+    expected = """\
+class MockTest : public Test {
+public:
+MOCK_METHOD1(Bar,
+void(const FooType& test_arg));
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        expected, self.GenerateMocks(source))
+
+  def testTemplateInATemplateTypedefWithComma(self):
+    source = """
+class Test {
+ public:
+  typedef std::function<void(
+      const vector<std::list<int>>&, int> FooType;
+  virtual void Bar(const FooType& test_arg);
+};
+"""
+    expected = """\
+class MockTest : public Test {
+public:
+MOCK_METHOD1(Bar,
+void(const FooType& test_arg));
+};
+"""
+    self.assertEqualIgnoreLeadingWhitespace(
+        expected, self.GenerateMocks(source))
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/googlemock/scripts/generator/cpp/keywords.py b/googlemock/scripts/generator/cpp/keywords.py
new file mode 100755
index 0000000..f694450
--- /dev/null
+++ b/googlemock/scripts/generator/cpp/keywords.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Neal Norwitz
+# Portions Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""C++ keywords and helper utilities for determining keywords."""
+
+__author__ = 'nnorwitz@google.com (Neal Norwitz)'
+
+
+try:
+    # Python 3.x
+    import builtins
+except ImportError:
+    # Python 2.x
+    import __builtin__ as builtins
+
+
+if not hasattr(builtins, 'set'):
+    # Nominal support for Python 2.3.
+    from sets import Set as set
+
+
+TYPES = set('bool char int long short double float void wchar_t unsigned signed'.split())
+TYPE_MODIFIERS = set('auto register const inline extern static virtual volatile mutable'.split())
+ACCESS = set('public protected private friend'.split())
+
+CASTS = set('static_cast const_cast dynamic_cast reinterpret_cast'.split())
+
+OTHERS = set('true false asm class namespace using explicit this operator sizeof'.split())
+OTHER_TYPES = set('new delete typedef struct union enum typeid typename template'.split())
+
+CONTROL = set('case switch default if else return goto'.split())
+EXCEPTION = set('try catch throw'.split())
+LOOP = set('while do for break continue'.split())
+
+ALL = TYPES | TYPE_MODIFIERS | ACCESS | CASTS | OTHERS | OTHER_TYPES | CONTROL | EXCEPTION | LOOP
+
+
+def IsKeyword(token):
+    return token in ALL
+
+def IsBuiltinType(token):
+    if token in ('virtual', 'inline'):
+        # These only apply to methods, they can't be types by themselves.
+        return False
+    return token in TYPES or token in TYPE_MODIFIERS
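+
+# Illustrative usage (not part of the original module): the helpers above
+# simply classify raw token strings, e.g.
+#
+#   IsKeyword('virtual')       # -> True  ('virtual' is a type modifier)
+#   IsBuiltinType('virtual')   # -> False ('virtual' only applies to methods)
+#   IsBuiltinType('unsigned')  # -> True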
diff --git a/googlemock/scripts/generator/cpp/tokenize.py b/googlemock/scripts/generator/cpp/tokenize.py
new file mode 100755
index 0000000..359d556
--- /dev/null
+++ b/googlemock/scripts/generator/cpp/tokenize.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Neal Norwitz
+# Portions Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tokenize C++ source code."""
+
+__author__ = 'nnorwitz@google.com (Neal Norwitz)'
+
+
+try:
+    # Python 3.x
+    import builtins
+except ImportError:
+    # Python 2.x
+    import __builtin__ as builtins
+
+
+import sys
+
+from cpp import utils
+
+
+if not hasattr(builtins, 'set'):
+    # Nominal support for Python 2.3.
+    from sets import Set as set
+
+
+# Add $ as a valid identifier char since so much code uses it.
+_letters = 'abcdefghijklmnopqrstuvwxyz'
+VALID_IDENTIFIER_CHARS = set(_letters + _letters.upper() + '_0123456789$')
+HEX_DIGITS = set('0123456789abcdefABCDEF')
+INT_OR_FLOAT_DIGITS = set('01234567890eE-+')
+
+
+# C++0x string prefixes.
+_STR_PREFIXES = set(('R', 'u8', 'u8R', 'u', 'uR', 'U', 'UR', 'L', 'LR'))
+
+
+# Token types.
+UNKNOWN = 'UNKNOWN'
+SYNTAX = 'SYNTAX'
+CONSTANT = 'CONSTANT'
+NAME = 'NAME'
+PREPROCESSOR = 'PREPROCESSOR'
+
+# Where the token originated from.  This can be used for backtracking.
+# It is always set to WHENCE_STREAM in this code.
+WHENCE_STREAM, WHENCE_QUEUE = range(2)
+
+
+class Token(object):
+    """Data container to represent a C++ token.
+
+    Tokens can be identifiers, syntax char(s), constants, or
+    pre-processor directives.
+
+    start contains the index of the first char of the token in the source.
+    end contains the index just past the last char of the token in the source.
+    """
+
+    def __init__(self, token_type, name, start, end):
+        self.token_type = token_type
+        self.name = name
+        self.start = start
+        self.end = end
+        self.whence = WHENCE_STREAM
+
+    def __str__(self):
+        if not utils.DEBUG:
+            return 'Token(%r)' % self.name
+        return 'Token(%r, %s, %s)' % (self.name, self.start, self.end)
+
+    __repr__ = __str__
+
+
+def _GetString(source, start, i):
+    i = source.find('"', i+1)
+    while source[i-1] == '\\':
+        # Count the trailing backslashes.
+        backslash_count = 1
+        j = i - 2
+        while source[j] == '\\':
+            backslash_count += 1
+            j -= 1
+        # When trailing backslashes are even, they escape each other.
+        if (backslash_count % 2) == 0:
+            break
+        i = source.find('"', i+1)
+    return i + 1
+
+
+def _GetChar(source, start, i):
+    # NOTE(nnorwitz): may not be quite correct, should be good enough.
+    i = source.find("'", i+1)
+    while source[i-1] == '\\':
+        # Need to special case '\\'.
+        if (i - 2) > start and source[i-2] == '\\':
+            break
+        i = source.find("'", i+1)
+    # Try to handle unterminated single quotes (in a #if 0 block).
+    if i < 0:
+        i = start
+    return i + 1
+
+
+def GetTokens(source):
+    """Returns a sequence of Tokens.
+
+    Args:
+      source: string of C++ source code.
+
+    Yields:
+      Token that represents the next token in the source.
+    """
+    # Cache various valid character sets for speed.
+    valid_identifier_chars = VALID_IDENTIFIER_CHARS
+    hex_digits = HEX_DIGITS
+    int_or_float_digits = INT_OR_FLOAT_DIGITS
+    int_or_float_digits2 = int_or_float_digits | set('.')
+
+    # Only ignore errors while in a #if 0 block.
+    ignore_errors = False
+    count_ifs = 0
+
+    i = 0
+    end = len(source)
+    while i < end:
+        # Skip whitespace.
+        while i < end and source[i].isspace():
+            i += 1
+        if i >= end:
+            return
+
+        token_type = UNKNOWN
+        start = i
+        c = source[i]
+        if c.isalpha() or c == '_':              # Find an identifier token.
+            token_type = NAME
+            while source[i] in valid_identifier_chars:
+                i += 1
+            # String and character constants can look like a name if
+            # they are something like L"".
+            if (source[i] == "'" and (i - start) == 1 and
+                source[start:i] in 'uUL'):
+                # u, U, and L are valid C++0x character prefixes.
+                token_type = CONSTANT
+                i = _GetChar(source, start, i)
+            elif source[i] == "'" and source[start:i] in _STR_PREFIXES:
+                token_type = CONSTANT
+                i = _GetString(source, start, i)
+        elif c == '/' and source[i+1] == '/':    # Find // comments.
+            i = source.find('\n', i)
+            if i == -1:  # Handle EOF.
+                i = end
+            continue
+        elif c == '/' and source[i+1] == '*':    # Find /* comments. */
+            i = source.find('*/', i) + 2
+            continue
+        elif c in ':+-<>&|*=':                   # : or :: (plus other chars).
+            token_type = SYNTAX
+            i += 1
+            new_ch = source[i]
+            if new_ch == c and c != '>':         # Treat ">>" as two tokens.
+                i += 1
+            elif c == '-' and new_ch == '>':
+                i += 1
+            elif new_ch == '=':
+                i += 1
+        elif c in '()[]{}~!?^%;/.,':             # Handle single char tokens.
+            token_type = SYNTAX
+            i += 1
+            if c == '.' and source[i].isdigit():
+                token_type = CONSTANT
+                i += 1
+                while source[i] in int_or_float_digits:
+                    i += 1
+                # Handle float suffixes.
+                for suffix in ('l', 'f'):
+                    if suffix == source[i:i+1].lower():
+                        i += 1
+                        break
+        elif c.isdigit():                        # Find integer.
+            token_type = CONSTANT
+            if c == '0' and source[i+1] in 'xX':
+                # Handle hex digits.
+                i += 2
+                while source[i] in hex_digits:
+                    i += 1
+            else:
+                while source[i] in int_or_float_digits2:
+                    i += 1
+            # Handle integer (and float) suffixes.
+            for suffix in ('ull', 'll', 'ul', 'l', 'f', 'u'):
+                size = len(suffix)
+                if suffix == source[i:i+size].lower():
+                    i += size
+                    break
+        elif c == '"':                           # Find string.
+            token_type = CONSTANT
+            i = _GetString(source, start, i)
+        elif c == "'":                           # Find char.
+            token_type = CONSTANT
+            i = _GetChar(source, start, i)
+        elif c == '#':                           # Find pre-processor command.
+            token_type = PREPROCESSOR
+            got_if = source[i:i+3] == '#if' and source[i+3:i+4].isspace()
+            if got_if:
+                count_ifs += 1
+            elif source[i:i+6] == '#endif':
+                count_ifs -= 1
+                if count_ifs == 0:
+                    ignore_errors = False
+
+            # TODO(nnorwitz): handle preprocessor statements (\ continuations).
+            while 1:
+                i1 = source.find('\n', i)
+                i2 = source.find('//', i)
+                i3 = source.find('/*', i)
+                i4 = source.find('"', i)
+                # NOTE(nnorwitz): doesn't handle comments in #define macros.
+                # Get the first important symbol (newline, comment, EOF/end).
+                i = min([x for x in (i1, i2, i3, i4, end) if x != -1])
+
+                # Handle #include "dir//foo.h" properly.
+                if source[i] == '"':
+                    i = source.find('"', i+1) + 1
+                    assert i > 0
+                    continue
+                # Keep going if end of the line and the line ends with \.
+                if not (i == i1 and source[i-1] == '\\'):
+                    if got_if:
+                        condition = source[start+4:i].lstrip()
+                        if (condition.startswith('0') or
+                            condition.startswith('(0)')):
+                            ignore_errors = True
+                    break
+                i += 1
+        elif c == '\\':                          # Handle \ in code.
+            # This is different from the pre-processor \ handling.
+            i += 1
+            continue
+        elif ignore_errors:
+            # Errors are suppressed inside an '#if 0' block: since that code
+            # is ignored anyway, just skip the bogus character instead of
+            # raising the exception below.
+            i += 1
+        else:
+            sys.stderr.write('Got invalid token in %s @ %d token:%s: %r\n' %
+                             ('?', i, c, source[i-10:i+10]))
+            raise RuntimeError('unexpected token')
+
+        if i <= 0:
+            print('Invalid index, exiting now.')
+            return
+        yield Token(token_type, source[start:i], start, i)
+
+
+if __name__ == '__main__':
+    def main(argv):
+        """Driver mostly for testing purposes."""
+        for filename in argv[1:]:
+            source = utils.ReadFile(filename)
+            if source is None:
+                continue
+
+            for token in GetTokens(source):
+                print('%-12s: %s' % (token.token_type, token.name))
+                # print('\r%6.2f%%' % (100.0 * index / token.end),)
+            sys.stdout.write('\n')
+
+
+    main(sys.argv)
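+
+# Illustrative programmatic use (not part of the original module): callers such
+# as the AST builder iterate the generator directly, e.g.
+#
+#   for token in GetTokens('int x = 42;'):
+#       print('%s %r' % (token.token_type, token.name))
+#
+# which yields NAME 'int', NAME 'x', SYNTAX '=', CONSTANT '42', SYNTAX ';'.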
diff --git a/googlemock/scripts/generator/cpp/utils.py b/googlemock/scripts/generator/cpp/utils.py
new file mode 100755
index 0000000..eab36ee
--- /dev/null
+++ b/googlemock/scripts/generator/cpp/utils.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Neal Norwitz
+# Portions Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generic utilities for C++ parsing."""
+
+__author__ = 'nnorwitz@google.com (Neal Norwitz)'
+
+
+import sys
+
+
+# Set to True to see the start/end token indices.
+DEBUG = True
+
+
+def ReadFile(filename, print_error=True):
+    """Returns the contents of a file."""
+    try:
+        fp = open(filename)
+        try:
+            return fp.read()
+        finally:
+            fp.close()
+    except IOError:
+        if print_error:
+            print('Error reading %s: %s' % (filename, sys.exc_info()[1]))
+        return None
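+
+# Illustrative use (not part of the original module; the file name is made up):
+#   source = ReadFile('some_header.h')  # returns None (and prints) on IOError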
diff --git a/googlemock/scripts/generator/gmock_gen.py b/googlemock/scripts/generator/gmock_gen.py
new file mode 100755
index 0000000..8cc0d13
--- /dev/null
+++ b/googlemock/scripts/generator/gmock_gen.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+#
+# Copyright 2008 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Driver for starting up Google Mock class generator."""
+
+__author__ = 'nnorwitz@google.com (Neal Norwitz)'
+
+import os
+import sys
+
+if __name__ == '__main__':
+  # Add the directory of this script to the path so we can import gmock_class.
+  sys.path.append(os.path.dirname(__file__))
+
+  from cpp import gmock_class
+  # Fix the docstring in case the user asks for usage information.
+  gmock_class.__doc__ = gmock_class.__doc__.replace('gmock_class.py', __file__)
+  gmock_class.main()
diff --git a/googlemock/scripts/gmock-config.in b/googlemock/scripts/gmock-config.in
new file mode 100755
index 0000000..2baefe9
--- /dev/null
+++ b/googlemock/scripts/gmock-config.in
@@ -0,0 +1,303 @@
+#!/bin/sh
+
+# These variables are automatically filled in by the configure script.
+name="@PACKAGE_TARNAME@"
+version="@PACKAGE_VERSION@"
+
+show_usage()
+{
+  echo "Usage: gmock-config [OPTIONS...]"
+}
+
+show_help()
+{
+  show_usage
+  cat <<\EOF
+
+The `gmock-config' script provides access to the necessary compile and linking
+flags to connect with Google C++ Mocking Framework, both in a build prior to
+installation, and on the system proper after installation. The installation
+overrides may be issued in combination with any other queries, but will only
+affect installation queries if called on a built but not installed gmock. The
+installation queries may not be issued with any other types of queries, and
+only one installation query may be made at a time. The version queries and
+compiler flag queries may be combined as desired but not mixed. Different
+version queries are always combined with logical "and" semantics, and only the
+last of any particular query is used while all previous ones are ignored. All
+versions must be specified as a sequence of numbers separated by periods.
+Compiler flag queries output the union of the sets of flags when combined.
+
+ Examples:
+  gmock-config --min-version=1.0 || echo "Insufficient Google Mock version."
+
+  g++ $(gmock-config --cppflags --cxxflags) -o foo.o -c foo.cpp
+  g++ $(gmock-config --ldflags --libs) -o foo foo.o
+
+  # When using a built but not installed Google Mock:
+  g++ $(../../my_gmock_build/scripts/gmock-config ...) ...
+
+  # When using an installed Google Mock, but with installation overrides:
+  export GMOCK_PREFIX="/opt"
+  g++ $(gmock-config --libdir="/opt/lib64" ...) ...
+
+ Help:
+  --usage                    brief usage information
+  --help                     display this help message
+
+ Installation Overrides:
+  --prefix=<dir>             overrides the installation prefix
+  --exec-prefix=<dir>        overrides the executable installation prefix
+  --libdir=<dir>             overrides the library installation prefix
+  --includedir=<dir>         overrides the header file installation prefix
+
+ Installation Queries:
+  --prefix                   installation prefix
+  --exec-prefix              executable installation prefix
+  --libdir                   library installation directory
+  --includedir               header file installation directory
+  --version                  the version of the Google Mock installation
+
+ Version Queries:
+  --min-version=VERSION      return 0 if the version is at least VERSION
+  --exact-version=VERSION    return 0 if the version is exactly VERSION
+  --max-version=VERSION      return 0 if the version is at most VERSION
+
+ Compilation Flag Queries:
+  --cppflags                 compile flags specific to the C-like preprocessors
+  --cxxflags                 compile flags appropriate for C++ programs
+  --ldflags                  linker flags
+  --libs                     libraries for linking
+
+EOF
+}
+
+# This function bounds our version with a min and a max. It uses some clever
+# POSIX-compliant variable expansion to portably do all the work in the shell
+# and avoid any dependency on a particular "sed" or "awk" implementation.
+# Notable is that it will only ever compare the first 3 components of versions.
+# Further components will be cleanly stripped off. All versions must be
+# unadorned, so "v1.0" will *not* work. The minimum version must be in $1, and
+# the max in $2. TODO(chandlerc@google.com): If this ever breaks, we should
+# investigate expanding this via autom4te from AS_VERSION_COMPARE rather than
+# continuing to maintain our own shell version.
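+#
+# For example, with version=1.7.0 the expansions used below would yield:
+#   ${version%%.*}    -> 1     (major)
+#   ${version#*.}     -> 7.0   (then %%.* of that gives the minor, 7)
+#   ${version#*.*.}   -> 0     (point)
+# (Illustrative value only; the real version comes from @PACKAGE_VERSION@.)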
+check_versions()
+{
+  major_version=${version%%.*}
+  minor_version="0"
+  point_version="0"
+  if test "${version#*.}" != "${version}"; then
+    minor_version=${version#*.}
+    minor_version=${minor_version%%.*}
+  fi
+  if test "${version#*.*.}" != "${version}"; then
+    point_version=${version#*.*.}
+    point_version=${point_version%%.*}
+  fi
+
+  min_version="$1"
+  min_major_version=${min_version%%.*}
+  min_minor_version="0"
+  min_point_version="0"
+  if test "${min_version#*.}" != "${min_version}"; then
+    min_minor_version=${min_version#*.}
+    min_minor_version=${min_minor_version%%.*}
+  fi
+  if test "${min_version#*.*.}" != "${min_version}"; then
+    min_point_version=${min_version#*.*.}
+    min_point_version=${min_point_version%%.*}
+  fi
+
+  max_version="$2"
+  max_major_version=${max_version%%.*}
+  max_minor_version="0"
+  max_point_version="0"
+  if test "${max_version#*.}" != "${max_version}"; then
+    max_minor_version=${max_version#*.}
+    max_minor_version=${max_minor_version%%.*}
+  fi
+  if test "${max_version#*.*.}" != "${max_version}"; then
+    max_point_version=${max_version#*.*.}
+    max_point_version=${max_point_version%%.*}
+  fi
+
+  test $(($major_version)) -lt $(($min_major_version)) && exit 1
+  if test $(($major_version)) -eq $(($min_major_version)); then
+    test $(($minor_version)) -lt $(($min_minor_version)) && exit 1
+    if test $(($minor_version)) -eq $(($min_minor_version)); then
+      test $(($point_version)) -lt $(($min_point_version)) && exit 1
+    fi
+  fi
+
+  test $(($major_version)) -gt $(($max_major_version)) && exit 1
+  if test $(($major_version)) -eq $(($max_major_version)); then
+    test $(($minor_version)) -gt $(($max_minor_version)) && exit 1
+    if test $(($minor_version)) -eq $(($max_minor_version)); then
+      test $(($point_version)) -gt $(($max_point_version)) && exit 1
+    fi
+  fi
+
+  exit 0
+}
+
+# Show the usage line when no arguments are specified.
+if test $# -eq 0; then
+  show_usage
+  exit 1
+fi
+
+while test $# -gt 0; do
+  case $1 in
+    --usage)          show_usage;         exit 0;;
+    --help)           show_help;          exit 0;;
+
+    # Installation overrides
+    --prefix=*)       GMOCK_PREFIX=${1#--prefix=};;
+    --exec-prefix=*)  GMOCK_EXEC_PREFIX=${1#--exec-prefix=};;
+    --libdir=*)       GMOCK_LIBDIR=${1#--libdir=};;
+    --includedir=*)   GMOCK_INCLUDEDIR=${1#--includedir=};;
+
+    # Installation queries
+    --prefix|--exec-prefix|--libdir|--includedir|--version)
+      if test -n "${do_query}"; then
+        show_usage
+        exit 1
+      fi
+      do_query=${1#--}
+      ;;
+
+    # Version checking
+    --min-version=*)
+      do_check_versions=yes
+      min_version=${1#--min-version=}
+      ;;
+    --max-version=*)
+      do_check_versions=yes
+      max_version=${1#--max-version=}
+      ;;
+    --exact-version=*)
+      do_check_versions=yes
+      exact_version=${1#--exact-version=}
+      ;;
+
+    # Compiler flag output
+    --cppflags)       echo_cppflags=yes;;
+    --cxxflags)       echo_cxxflags=yes;;
+    --ldflags)        echo_ldflags=yes;;
+    --libs)           echo_libs=yes;;
+
+    # Everything else is an error
+    *)                show_usage;         exit 1;;
+  esac
+  shift
+done
+
+# These have defaults filled in by the configure script but can also be
+# overridden by environment variables or command line parameters.
+prefix="${GMOCK_PREFIX:-@prefix@}"
+exec_prefix="${GMOCK_EXEC_PREFIX:-@exec_prefix@}"
+libdir="${GMOCK_LIBDIR:-@libdir@}"
+includedir="${GMOCK_INCLUDEDIR:-@includedir@}"
+
+# We try to detect whether this script is located somewhere other than its
+# installed location. If it is, we provide variables pointing to the source and
+# build tree rather than to the install tree. We also locate Google Test using
+# the configured
+# gtest-config script rather than searching the PATH and our bindir for one.
+# This allows building against a just-built gmock rather than an installed
+# gmock.
+bindir="@bindir@"
+this_relative_bindir=`dirname $0`
+this_bindir=`cd ${this_relative_bindir}; pwd -P`
+if test "${this_bindir}" = "${this_bindir%${bindir}}"; then
+  # The path to the script doesn't end in the bindir sequence from Autoconf,
+  # assume that we are in a build tree.
+  build_dir=`dirname ${this_bindir}`
+  src_dir=`cd ${this_bindir}/@top_srcdir@; pwd -P`
+
+  # TODO(chandlerc@google.com): This is a dangerous dependency on libtool, we
+  # should work to remove it, and/or remove libtool altogether, replacing it
+  # with direct references to the library and a link path.
+  gmock_libs="${build_dir}/lib/libgmock.la"
+  gmock_ldflags=""
+
+  # We provide hooks to include from either the source or build dir, where the
+  # build dir is always preferred. This will potentially allow us to write
+  # build rules for generated headers and have them automatically be preferred
+  # over provided versions.
+  gmock_cppflags="-I${build_dir}/include -I${src_dir}/include"
+  gmock_cxxflags=""
+
+  # Directly invoke the gtest-config script used during the build process.
+  gtest_config="@GTEST_CONFIG@"
+else
+  # We're using an installed gmock, although it may be staged under some
+  # prefix. Assume (as our own libraries do) that we can resolve the prefix,
+  # and are present in the dynamic link paths.
+  gmock_ldflags="-L${libdir}"
+  gmock_libs="-l${name}"
+  gmock_cppflags="-I${includedir}"
+  gmock_cxxflags=""
+
+  # We also prefer any gtest-config script installed in our prefix. Lacking
+  # one, we look in the PATH for one.
+  gtest_config="${bindir}/gtest-config"
+  if test ! -x "${gtest_config}"; then
+    gtest_config=`which gtest-config`
+  fi
+fi
+
+# Ensure that we have located a Google Test to link against.
+if ! test -x "${gtest_config}"; then
+  echo "Unable to locate Google Test, check your Google Mock configuration" \
+       "and installation" >&2
+  exit 1
+elif ! "${gtest_config}" "--exact-version=@GTEST_VERSION@"; then
+  echo "The Google Test found is not the same version as Google Mock was " \
+       "built against" >&2
+  exit 1
+fi
+
+# Add the necessary Google Test bits into the various flag variables
+gmock_cppflags="${gmock_cppflags} `${gtest_config} --cppflags`"
+gmock_cxxflags="${gmock_cxxflags} `${gtest_config} --cxxflags`"
+gmock_ldflags="${gmock_ldflags} `${gtest_config} --ldflags`"
+gmock_libs="${gmock_libs} `${gtest_config} --libs`"
+
+# Do an installation query if requested.
+if test -n "$do_query"; then
+  case $do_query in
+    prefix)           echo $prefix;       exit 0;;
+    exec-prefix)      echo $exec_prefix;  exit 0;;
+    libdir)           echo $libdir;       exit 0;;
+    includedir)       echo $includedir;   exit 0;;
+    version)          echo $version;      exit 0;;
+    *)                show_usage;         exit 1;;
+  esac
+fi
+
+# Do a version check if requested.
+if test "$do_check_versions" = "yes"; then
+  # Make sure we didn't receive a bad combination of parameters.
+  test "$echo_cppflags" = "yes" && show_usage && exit 1
+  test "$echo_cxxflags" = "yes" && show_usage && exit 1
+  test "$echo_ldflags" = "yes"  && show_usage && exit 1
+  test "$echo_libs" = "yes"     && show_usage && exit 1
+
+  if test "$exact_version" != ""; then
+    check_versions $exact_version $exact_version
+    # unreachable
+  else
+    check_versions ${min_version:-0.0.0} ${max_version:-9999.9999.9999}
+    # unreachable
+  fi
+fi
+
+# Do the output in the correct order so that these can be used in-line of
+# a compiler invocation.
+output=""
+test "$echo_cppflags" = "yes" && output="$output $gmock_cppflags"
+test "$echo_cxxflags" = "yes" && output="$output $gmock_cxxflags"
+test "$echo_ldflags" = "yes"  && output="$output $gmock_ldflags"
+test "$echo_libs" = "yes"     && output="$output $gmock_libs"
+echo $output
+
+exit 0
diff --git a/googlemock/scripts/gmock_doctor.py b/googlemock/scripts/gmock_doctor.py
new file mode 100755
index 0000000..74992bc
--- /dev/null
+++ b/googlemock/scripts/gmock_doctor.py
@@ -0,0 +1,640 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Converts compiler's errors in code using Google Mock to plain English."""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import re
+import sys
+
+_VERSION = '1.0.3'
+
+_EMAIL = 'googlemock@googlegroups.com'
+
+_COMMON_GMOCK_SYMBOLS = [
+    # Matchers
+    '_',
+    'A',
+    'AddressSatisfies',
+    'AllOf',
+    'An',
+    'AnyOf',
+    'ContainerEq',
+    'Contains',
+    'ContainsRegex',
+    'DoubleEq',
+    'ElementsAre',
+    'ElementsAreArray',
+    'EndsWith',
+    'Eq',
+    'Field',
+    'FloatEq',
+    'Ge',
+    'Gt',
+    'HasSubstr',
+    'IsInitializedProto',
+    'Le',
+    'Lt',
+    'MatcherCast',
+    'Matches',
+    'MatchesRegex',
+    'NanSensitiveDoubleEq',
+    'NanSensitiveFloatEq',
+    'Ne',
+    'Not',
+    'NotNull',
+    'Pointee',
+    'Property',
+    'Ref',
+    'ResultOf',
+    'SafeMatcherCast',
+    'StartsWith',
+    'StrCaseEq',
+    'StrCaseNe',
+    'StrEq',
+    'StrNe',
+    'Truly',
+    'TypedEq',
+    'Value',
+
+    # Actions
+    'Assign',
+    'ByRef',
+    'DeleteArg',
+    'DoAll',
+    'DoDefault',
+    'IgnoreResult',
+    'Invoke',
+    'InvokeArgument',
+    'InvokeWithoutArgs',
+    'Return',
+    'ReturnNew',
+    'ReturnNull',
+    'ReturnRef',
+    'SaveArg',
+    'SetArgReferee',
+    'SetArgPointee',
+    'SetArgumentPointee',
+    'SetArrayArgument',
+    'SetErrnoAndReturn',
+    'Throw',
+    'WithArg',
+    'WithArgs',
+    'WithoutArgs',
+
+    # Cardinalities
+    'AnyNumber',
+    'AtLeast',
+    'AtMost',
+    'Between',
+    'Exactly',
+
+    # Sequences
+    'InSequence',
+    'Sequence',
+
+    # Misc
+    'DefaultValue',
+    'Mock',
+    ]
+
+# Regex for matching source file path and line number in the compiler's errors.
+_GCC_FILE_LINE_RE = r'(?P<file>.*):(?P<line>\d+):(\d+:)?\s+'
+_CLANG_FILE_LINE_RE = r'(?P<file>.*):(?P<line>\d+):(?P<column>\d+):\s+'
+_CLANG_NON_GMOCK_FILE_LINE_RE = (
+    r'(?P<file>.*[/\\^](?!gmock-)[^/\\]+):(?P<line>\d+):(?P<column>\d+):\s+')
+
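+# For illustration (the file name is made up): _GCC_FILE_LINE_RE matches a
+# location prefix such as "foo_test.cc:123: ", capturing 'file' and 'line';
+# the per-disease regexes below describe the compiler message text expected
+# after such a prefix.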
+
+def _FindAllMatches(regex, s):
+  """Generates all matches of regex in string s."""
+
+  r = re.compile(regex)
+  return r.finditer(s)
+
+
+def _GenericDiagnoser(short_name, long_name, diagnoses, msg):
+  """Diagnoses the given disease by pattern matching.
+
+  Can provide different diagnoses for different patterns.
+
+  Args:
+    short_name: Short name of the disease.
+    long_name:  Long name of the disease.
+    diagnoses:  A list of (regex, diagnosis) pairs, where the diagnosis is a
+                format string applied when the corresponding regex matches.
+    msg:        Compiler's error messages.
+  Yields:
+    Tuples of the form
+      (short name of disease, long name of disease, diagnosis).
+  """
+  for regex, diagnosis in diagnoses:
+    if re.search(regex, msg):
+      diagnosis = '%(file)s:%(line)s:' + diagnosis
+      for m in _FindAllMatches(regex, msg):
+        yield (short_name, long_name, diagnosis % m.groupdict())
+
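+# Illustration (hypothetical message and pattern, not one of the diagnosers
+# below): given
+#
+#   msg = "foo_test.cc:12: error: 'Return' was not declared in this scope"
+#   diagnoses = [(_GCC_FILE_LINE_RE +
+#                 r"error: '(?P<symbol>\w+)' was not declared in this scope",
+#                 "\n'%(symbol)s' may need a using-declaration.")]
+#
+# _GenericDiagnoser('NUS', 'Need to Use Symbol', diagnoses, msg) yields
+#   ('NUS', 'Need to Use Symbol',
+#    "foo_test.cc:12:\n'Return' may need a using-declaration.")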
+
+def _NeedToReturnReferenceDiagnoser(msg):
+  """Diagnoses the NRR disease, given the error messages by the compiler."""
+
+  gcc_regex = (r'In member function \'testing::internal::ReturnAction<R>.*\n'
+               + _GCC_FILE_LINE_RE + r'instantiated from here\n'
+               r'.*gmock-actions\.h.*error: creating array with negative size')
+  clang_regex = (r'error:.*array.*negative.*\r?\n'
+                 r'(.*\n)*?' +
+                 _CLANG_NON_GMOCK_FILE_LINE_RE +
+                 r'note: in instantiation of function template specialization '
+                 r'\'testing::internal::ReturnAction<(?P<type>.*)>'
+                 r'::operator Action<.*>\' requested here')
+  clang11_re = (r'use_ReturnRef_instead_of_Return_to_return_a_reference.*'
+                r'(.*\n)*?' + _CLANG_NON_GMOCK_FILE_LINE_RE)
+
+  diagnosis = """
+You are using a Return() action in a function that returns a reference to
+%(type)s.  Please use ReturnRef() instead."""
+  return _GenericDiagnoser('NRR', 'Need to Return Reference',
+                           [(clang_regex, diagnosis),
+                            (clang11_re, diagnosis % {'type': 'a type'}),
+                            (gcc_regex, diagnosis % {'type': 'a type'})],
+                           msg)
+
+
+def _NeedToReturnSomethingDiagnoser(msg):
+  """Diagnoses the NRS disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE + r'(instantiated from here\n.'
+               r'*gmock.*actions\.h.*error: void value not ignored)'
+               r'|(error: control reaches end of non-void function)')
+  clang_regex1 = (_CLANG_FILE_LINE_RE +
+                  r'error: cannot initialize return object '
+                  r'of type \'Result\' \(aka \'(?P<return_type>.*)\'\) '
+                  r'with an rvalue of type \'void\'')
+  clang_regex2 = (_CLANG_FILE_LINE_RE +
+                  r'error: cannot initialize return object '
+                  r'of type \'(?P<return_type>.*)\' '
+                  r'with an rvalue of type \'void\'')
+  diagnosis = """
+You are using an action that returns void, but it needs to return
+%(return_type)s.  Please tell it *what* to return.  Perhaps you can use
+the pattern DoAll(some_action, Return(some_value))?"""
+  return _GenericDiagnoser(
+      'NRS',
+      'Need to Return Something',
+      [(gcc_regex, diagnosis % {'return_type': '*something*'}),
+       (clang_regex1, diagnosis),
+       (clang_regex2, diagnosis)],
+      msg)
+
+
+def _NeedToReturnNothingDiagnoser(msg):
+  """Diagnoses the NRN disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE + r'instantiated from here\n'
+               r'.*gmock-actions\.h.*error: instantiation of '
+               r'\'testing::internal::ReturnAction<R>::Impl<F>::value_\' '
+               r'as type \'void\'')
+  clang_regex1 = (r'error: field has incomplete type '
+                  r'\'Result\' \(aka \'void\'\)(\r)?\n'
+                  r'(.*\n)*?' +
+                  _CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
+                  r'of function template specialization '
+                  r'\'testing::internal::ReturnAction<(?P<return_type>.*)>'
+                  r'::operator Action<void \(.*\)>\' requested here')
+  clang_regex2 = (r'error: field has incomplete type '
+                  r'\'Result\' \(aka \'void\'\)(\r)?\n'
+                  r'(.*\n)*?' +
+                  _CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
+                  r'of function template specialization '
+                  r'\'testing::internal::DoBothAction<.*>'
+                  r'::operator Action<(?P<return_type>.*) \(.*\)>\' '
+                  r'requested here')
+  diagnosis = """
+You are using an action that returns %(return_type)s, but it needs to return
+void.  Please use a void-returning action instead.
+
+All actions but the last in DoAll(...) must return void.  Perhaps you need
+to re-arrange the order of actions in a DoAll(), if you are using one?"""
+  return _GenericDiagnoser(
+      'NRN',
+      'Need to Return Nothing',
+      [(gcc_regex, diagnosis % {'return_type': '*something*'}),
+       (clang_regex1, diagnosis),
+       (clang_regex2, diagnosis)],
+      msg)
+
+
+def _IncompleteByReferenceArgumentDiagnoser(msg):
+  """Diagnoses the IBRA disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE + r'instantiated from here\n'
+               r'.*gtest-printers\.h.*error: invalid application of '
+               r'\'sizeof\' to incomplete type \'(?P<type>.*)\'')
+
+  clang_regex = (r'.*gtest-printers\.h.*error: invalid application of '
+                 r'\'sizeof\' to an incomplete type '
+                 r'\'(?P<type>.*)( const)?\'\r?\n'
+                 r'(.*\n)*?' +
+                 _CLANG_NON_GMOCK_FILE_LINE_RE +
+                 r'note: in instantiation of member function '
+                 r'\'testing::internal2::TypeWithoutFormatter<.*>::'
+                 r'PrintValue\' requested here')
+  diagnosis = """
+In order to mock this function, Google Mock needs to see the definition
+of type "%(type)s" - declaration alone is not enough.  Either #include
+the header that defines it, or change the argument to be passed
+by pointer."""
+
+  return _GenericDiagnoser('IBRA', 'Incomplete By-Reference Argument Type',
+                           [(gcc_regex, diagnosis),
+                            (clang_regex, diagnosis)],
+                           msg)
+
+
+def _OverloadedFunctionMatcherDiagnoser(msg):
+  """Diagnoses the OFM disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for '
+               r'call to \'Truly\(<unresolved overloaded function type>\)')
+  clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching function for '
+                 r'call to \'Truly')
+  diagnosis = """
+The argument you gave to Truly() is an overloaded function.  Please tell
+your compiler which overloaded version you want to use.
+
+For example, if you want to use the version whose signature is
+  bool Foo(int n);
+you should write
+  Truly(static_cast<bool (*)(int n)>(Foo))"""
+  return _GenericDiagnoser('OFM', 'Overloaded Function Matcher',
+                           [(gcc_regex, diagnosis),
+                            (clang_regex, diagnosis)],
+                           msg)
+
+
+def _OverloadedFunctionActionDiagnoser(msg):
+  """Diagnoses the OFA disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for call to '
+               r'\'Invoke\(<unresolved overloaded function type>')
+  clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching '
+                 r'function for call to \'Invoke\'\r?\n'
+                 r'(.*\n)*?'
+                 r'.*\bgmock-generated-actions\.h:\d+:\d+:\s+'
+                 r'note: candidate template ignored:\s+'
+                 r'couldn\'t infer template argument \'FunctionImpl\'')
+  diagnosis = """
+The function you are passing to Invoke() is overloaded.  Please tell your
+compiler which overloaded version you want to use.
+
+For example, if you want to use the version whose signature is
+  bool MyFunction(int n, double x);
+you should write something like
+  Invoke(static_cast<bool (*)(int n, double x)>(MyFunction))"""
+  return _GenericDiagnoser('OFA', 'Overloaded Function Action',
+                           [(gcc_regex, diagnosis),
+                            (clang_regex, diagnosis)],
+                           msg)
+
+
+def _OverloadedMethodActionDiagnoser(msg):
+  """Diagnoses the OMA disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for '
+               r'call to \'Invoke\(.+, <unresolved overloaded function '
+               r'type>\)')
+  clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching function '
+                 r'for call to \'Invoke\'\r?\n'
+                 r'(.*\n)*?'
+                 r'.*\bgmock-generated-actions\.h:\d+:\d+: '
+                 r'note: candidate function template not viable: '
+                 r'requires .*, but 2 (arguments )?were provided')
+  diagnosis = """
+The second argument you gave to Invoke() is an overloaded method.  Please
+tell your compiler which overloaded version you want to use.
+
+For example, if you want to use the version whose signature is
+  class Foo {
+    ...
+    bool Bar(int n, double x);
+  };
+you should write something like
+  Invoke(foo, static_cast<bool (Foo::*)(int n, double x)>(&Foo::Bar))"""
+  return _GenericDiagnoser('OMA', 'Overloaded Method Action',
+                           [(gcc_regex, diagnosis),
+                            (clang_regex, diagnosis)],
+                           msg)
+
+
+def _MockObjectPointerDiagnoser(msg):
+  """Diagnoses the MOP disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE + r'error: request for member '
+               r'\'gmock_(?P<method>.+)\' in \'(?P<mock_object>.+)\', '
+               r'which is of non-class type \'(.*::)*(?P<class_name>.+)\*\'')
+  clang_regex = (_CLANG_FILE_LINE_RE + r'error: member reference type '
+                 r'\'(?P<class_name>.*?) *\' is a pointer; '
+                 r'(did you mean|maybe you meant) to use \'->\'\?')
+  diagnosis = """
+The first argument to ON_CALL() and EXPECT_CALL() must be a mock *object*,
+not a *pointer* to it.  Please write '*(%(mock_object)s)' instead of
+'%(mock_object)s' as your first argument.
+
+For example, given the mock class:
+
+  class %(class_name)s : public ... {
+    ...
+    MOCK_METHOD0(%(method)s, ...);
+  };
+
+and the following mock instance:
+
+  %(class_name)s* mock_ptr = ...
+
+you should use the EXPECT_CALL like this:
+
+  EXPECT_CALL(*mock_ptr, %(method)s(...));"""
+
+  return _GenericDiagnoser(
+      'MOP',
+      'Mock Object Pointer',
+      [(gcc_regex, diagnosis),
+       (clang_regex, diagnosis % {'mock_object': 'mock_object',
+                                  'method': 'method',
+                                  'class_name': '%(class_name)s'})],
+       msg)
+
+
+def _NeedToUseSymbolDiagnoser(msg):
+  """Diagnoses the NUS disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE + r'error: \'(?P<symbol>.+)\' '
+               r'(was not declared in this scope|has not been declared)')
+  clang_regex = (_CLANG_FILE_LINE_RE +
+                 r'error: (use of undeclared identifier|unknown type name|'
+                 r'no template named) \'(?P<symbol>[^\']+)\'')
+  diagnosis = """
+'%(symbol)s' is defined by Google Mock in the testing namespace.
+Did you forget to write
+  using testing::%(symbol)s;
+?"""
+  for m in (list(_FindAllMatches(gcc_regex, msg)) +
+            list(_FindAllMatches(clang_regex, msg))):
+    symbol = m.groupdict()['symbol']
+    if symbol in _COMMON_GMOCK_SYMBOLS:
+      yield ('NUS', 'Need to Use Symbol', diagnosis % m.groupdict())
+
+
+def _NeedToUseReturnNullDiagnoser(msg):
+  """Diagnoses the NRNULL disease, given the error messages by the compiler."""
+
+  gcc_regex = ('instantiated from \'testing::internal::ReturnAction<R>'
+               '::operator testing::Action<Func>\(\) const.*\n' +
+               _GCC_FILE_LINE_RE + r'instantiated from here\n'
+               r'.*error: no matching function for call to \'ImplicitCast_\('
+               r'(:?long )?int&\)')
+  clang_regex = (r'\bgmock-actions.h:.* error: no matching function for '
+                 r'call to \'ImplicitCast_\'\r?\n'
+                 r'(.*\n)*?' +
+                 _CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
+                 r'of function template specialization '
+                 r'\'testing::internal::ReturnAction<(int|long)>::operator '
+                 r'Action<(?P<type>.*)\(\)>\' requested here')
+  diagnosis = """
+You are probably calling Return(NULL) and the compiler isn't sure how to turn
+NULL into %(type)s. Use ReturnNull() instead.
+Note: the line number may be off; please fix all instances of Return(NULL)."""
+  return _GenericDiagnoser(
+      'NRNULL', 'Need to use ReturnNull',
+      [(clang_regex, diagnosis),
+       (gcc_regex, diagnosis % {'type': 'the right type'})],
+      msg)
+
+
+def _TypeInTemplatedBaseDiagnoser(msg):
+  """Diagnoses the TTB disease, given the error messages by the compiler."""
+
+  # This version works when the type is used as the mock function's return
+  # type.
+  gcc_4_3_1_regex_type_in_retval = (
+      r'In member function \'int .*\n' + _GCC_FILE_LINE_RE +
+      r'error: a function call cannot appear in a constant-expression')
+  gcc_4_4_0_regex_type_in_retval = (
+      r'error: a function call cannot appear in a constant-expression'
+      + _GCC_FILE_LINE_RE + r'error: template argument 1 is invalid\n')
+  # This version works when the type is used as the mock function's sole
+  # parameter type.
+  gcc_regex_type_of_sole_param = (
+      _GCC_FILE_LINE_RE +
+      r'error: \'(?P<type>.+)\' was not declared in this scope\n'
+      r'.*error: template argument 1 is invalid\n')
+  # This version works when the type is used as a parameter of a mock
+  # function that has multiple parameters.
+  gcc_regex_type_of_a_param = (
+      r'error: expected `;\' before \'::\' token\n'
+      + _GCC_FILE_LINE_RE +
+      r'error: \'(?P<type>.+)\' was not declared in this scope\n'
+      r'.*error: template argument 1 is invalid\n'
+      r'.*error: \'.+\' was not declared in this scope')
+  clang_regex_type_of_retval_or_sole_param = (
+      _CLANG_FILE_LINE_RE +
+      r'error: use of undeclared identifier \'(?P<type>.*)\'\n'
+      r'(.*\n)*?'
+      r'(?P=file):(?P=line):\d+: error: '
+      r'non-friend class member \'Result\' cannot have a qualified name'
+      )
+  clang_regex_type_of_a_param = (
+      _CLANG_FILE_LINE_RE +
+      r'error: C\+\+ requires a type specifier for all declarations\n'
+      r'(.*\n)*?'
+      r'(?P=file):(?P=line):(?P=column): error: '
+      r'C\+\+ requires a type specifier for all declarations'
+      )
+  clang_regex_unknown_type = (
+      _CLANG_FILE_LINE_RE +
+      r'error: unknown type name \'(?P<type>[^\']+)\''
+      )
+
+  diagnosis = """
+In a mock class template, types or typedefs defined in the base class
+template are *not* automatically visible.  This is how C++ works.  Before
+you can use a type or typedef named %(type)s defined in base class Base<T>, you
+need to make it visible.  One way to do it is:
+
+  typedef typename Base<T>::%(type)s %(type)s;"""
+
+  for diag in _GenericDiagnoser(
+      'TTB', 'Type in Template Base',
+      [(gcc_4_3_1_regex_type_in_retval, diagnosis % {'type': 'Foo'}),
+       (gcc_4_4_0_regex_type_in_retval, diagnosis % {'type': 'Foo'}),
+       (gcc_regex_type_of_sole_param, diagnosis),
+       (gcc_regex_type_of_a_param, diagnosis),
+       (clang_regex_type_of_retval_or_sole_param, diagnosis),
+       (clang_regex_type_of_a_param, diagnosis % {'type': 'Foo'})],
+      msg):
+    yield diag
+  # Avoid overlap with the NUS pattern.
+  for m in _FindAllMatches(clang_regex_unknown_type, msg):
+    type_ = m.groupdict()['type']
+    if type_ not in _COMMON_GMOCK_SYMBOLS:
+      yield ('TTB', 'Type in Template Base', diagnosis % m.groupdict())
+
+
+def _WrongMockMethodMacroDiagnoser(msg):
+  """Diagnoses the WMM disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE +
+               r'.*this_method_does_not_take_(?P<wrong_args>\d+)_argument.*\n'
+               r'.*\n'
+               r'.*candidates are.*FunctionMocker<[^>]+A(?P<args>\d+)\)>')
+  clang_regex = (_CLANG_NON_GMOCK_FILE_LINE_RE +
+                 r'error:.*array.*negative.*r?\n'
+                 r'(.*\n)*?'
+                 r'(?P=file):(?P=line):(?P=column): error: too few arguments '
+                 r'to function call, expected (?P<args>\d+), '
+                 r'have (?P<wrong_args>\d+)')
+  clang11_re = (_CLANG_NON_GMOCK_FILE_LINE_RE +
+                r'.*this_method_does_not_take_'
+                r'(?P<wrong_args>\d+)_argument.*')
+  diagnosis = """
+You are using MOCK_METHOD%(wrong_args)s to define a mock method that has
+%(args)s arguments. Use MOCK_METHOD%(args)s (or MOCK_CONST_METHOD%(args)s,
+MOCK_METHOD%(args)s_T, MOCK_CONST_METHOD%(args)s_T as appropriate) instead."""
+  return _GenericDiagnoser('WMM', 'Wrong MOCK_METHODn Macro',
+                           [(gcc_regex, diagnosis),
+                            (clang11_re, diagnosis % {'wrong_args': 'm',
+                                                      'args': 'n'}),
+                            (clang_regex, diagnosis)],
+                           msg)
+
+
+def _WrongParenPositionDiagnoser(msg):
+  """Diagnoses the WPP disease, given the error messages by the compiler."""
+
+  gcc_regex = (_GCC_FILE_LINE_RE +
+               r'error:.*testing::internal::MockSpec<.* has no member named \''
+               r'(?P<method>\w+)\'')
+  clang_regex = (_CLANG_NON_GMOCK_FILE_LINE_RE +
+                 r'error: no member named \'(?P<method>\w+)\' in '
+                 r'\'testing::internal::MockSpec<.*>\'')
+  diagnosis = """
+The closing parenthesis of ON_CALL or EXPECT_CALL should be *before*
+".%(method)s".  For example, you should write:
+  EXPECT_CALL(my_mock, Foo(_)).%(method)s(...);
+instead of:
+  EXPECT_CALL(my_mock, Foo(_).%(method)s(...));"""
+  return _GenericDiagnoser('WPP', 'Wrong Parenthesis Position',
+                           [(gcc_regex, diagnosis),
+                            (clang_regex, diagnosis)],
+                           msg)
+
+
+_DIAGNOSERS = [
+    _IncompleteByReferenceArgumentDiagnoser,
+    _MockObjectPointerDiagnoser,
+    _NeedToReturnNothingDiagnoser,
+    _NeedToReturnReferenceDiagnoser,
+    _NeedToReturnSomethingDiagnoser,
+    _NeedToUseReturnNullDiagnoser,
+    _NeedToUseSymbolDiagnoser,
+    _OverloadedFunctionActionDiagnoser,
+    _OverloadedFunctionMatcherDiagnoser,
+    _OverloadedMethodActionDiagnoser,
+    _TypeInTemplatedBaseDiagnoser,
+    _WrongMockMethodMacroDiagnoser,
+    _WrongParenPositionDiagnoser,
+    ]
+
+
+def Diagnose(msg):
+  """Generates all possible diagnoses given the compiler error message."""
+
+  msg = re.sub(r'\x1b\[[^m]*m', '', msg)  # Strips all color formatting.
+  # Assuming the string is using the UTF-8 encoding, replaces the left and
+  # the right single quote characters with apostrophes.
+  msg = re.sub(r'(\xe2\x80\x98|\xe2\x80\x99)', "'", msg)
+
+  diagnoses = []
+  for diagnoser in _DIAGNOSERS:
+    for diag in diagnoser(msg):
+      diagnosis = '[%s - %s]\n%s' % diag
+      if diagnosis not in diagnoses:
+        diagnoses.append(diagnosis)
+  return diagnoses
+
+
+def main():
+  print ('Google Mock Doctor v%s - '
+         'diagnoses problems in code using Google Mock.' % _VERSION)
+
+  if sys.stdin.isatty():
+    print ('Please copy and paste the compiler errors here.  Press Ctrl-D '
+           'when you are done:')
+  else:
+    print ('Waiting for compiler errors on stdin . . .')
+
+  msg = sys.stdin.read().strip()
+  diagnoses = Diagnose(msg)
+  count = len(diagnoses)
+  if not count:
+    print ("""
+Your compiler complained:
+8<------------------------------------------------------------
+%s
+------------------------------------------------------------>8
+
+Uh-oh, I'm not smart enough to figure out what the problem is. :-(
+However...
+If you send your source code and the compiler's error messages to
+%s, you can be helped and I can get smarter --
+win-win for us!""" % (msg, _EMAIL))
+  else:
+    print ('------------------------------------------------------------')
+    if count > 1:
+      print ('Your code appears to have the following %s diseases:' % count)
+    else:
+      print ('Your code appears to have the following disease:')
+    i = 0
+    for d in diagnoses:
+      i += 1
+      if count > 1:
+        print ('\n#%s:' % (i,))
+      print (d)
+    print ("""
+How did I do?  If you think I'm wrong or unhelpful, please send your
+source code and the compiler's error messages to %s.
+Then you can be helped and I can get smarter -- I promise I won't be upset!""" %
+           _EMAIL)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/googlemock/scripts/upload.py b/googlemock/scripts/upload.py
new file mode 100755
index 0000000..6e6f9a1
--- /dev/null
+++ b/googlemock/scripts/upload.py
@@ -0,0 +1,1387 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tool for uploading diffs from a version control system to the codereview app.
+
+Usage summary: upload.py [options] [-- diff_options]
+
+Diff options are passed to the diff command of the underlying system.
+
+Supported version control systems:
+  Git
+  Mercurial
+  Subversion
+
+It is important for Git/Mercurial users to specify a tree/node/branch to diff
+against by using the '--rev' option.
+"""
+# This code is derived from appcfg.py in the App Engine SDK (open source),
+# and from ASPN recipe #146306.
+
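+# Illustrative invocations based on the usage summary above (anything after
+# '--' is forwarded to the underlying diff command):
+#
+#   upload.py --rev=master             # Git/Hg: diff against 'master'
+#   upload.py -- --ignore-all-space    # pass an option through to 'git diff'
+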
+import cookielib
+import getpass
+import logging
+import md5
+import mimetypes
+import optparse
+import os
+import re
+import socket
+import subprocess
+import sys
+import urllib
+import urllib2
+import urlparse
+
+try:
+  import readline
+except ImportError:
+  pass
+
+# The logging verbosity:
+#  0: Errors only.
+#  1: Status messages.
+#  2: Info logs.
+#  3: Debug logs.
+verbosity = 1
+
+# Max size of patch or base file.
+MAX_UPLOAD_SIZE = 900 * 1024
+
+
+def GetEmail(prompt):
+  """Prompts the user for their email address and returns it.
+
+  The last used email address is saved to a file and offered up as a suggestion
+  to the user. If the user presses enter without typing in anything the last
+  used email address is used. If the user enters a new address, it is saved
+  for next time we prompt.
+
+  """
+  last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
+  last_email = ""
+  if os.path.exists(last_email_file_name):
+    try:
+      last_email_file = open(last_email_file_name, "r")
+      last_email = last_email_file.readline().strip("\n")
+      last_email_file.close()
+      prompt += " [%s]" % last_email
+    except IOError, e:
+      pass
+  email = raw_input(prompt + ": ").strip()
+  if email:
+    try:
+      last_email_file = open(last_email_file_name, "w")
+      last_email_file.write(email)
+      last_email_file.close()
+    except IOError, e:
+      pass
+  else:
+    email = last_email
+  return email
+
+
+def StatusUpdate(msg):
+  """Print a status message to stdout.
+
+  If 'verbosity' is greater than 0, print the message.
+
+  Args:
+    msg: The string to print.
+  """
+  if verbosity > 0:
+    print msg
+
+
+def ErrorExit(msg):
+  """Print an error message to stderr and exit."""
+  print >>sys.stderr, msg
+  sys.exit(1)
+
+
+class ClientLoginError(urllib2.HTTPError):
+  """Raised to indicate there was an error authenticating with ClientLogin."""
+
+  def __init__(self, url, code, msg, headers, args):
+    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
+    self.args = args
+    self.reason = args["Error"]
+
+
+class AbstractRpcServer(object):
+  """Provides a common interface for a simple RPC server."""
+
+  def __init__(self, host, auth_function, host_override=None, extra_headers={},
+               save_cookies=False):
+    """Creates a new HttpRpcServer.
+
+    Args:
+      host: The host to send requests to.
+      auth_function: A function that takes no arguments and returns an
+        (email, password) tuple when called. Will be called if authentication
+        is required.
+      host_override: The host header to send to the server (defaults to host).
+      extra_headers: A dict of extra headers to append to every request.
+      save_cookies: If True, save the authentication cookies to local disk.
+        If False, use an in-memory cookiejar instead.  Subclasses must
+        implement this functionality.  Defaults to False.
+    """
+    self.host = host
+    self.host_override = host_override
+    self.auth_function = auth_function
+    self.authenticated = False
+    self.extra_headers = extra_headers
+    self.save_cookies = save_cookies
+    self.opener = self._GetOpener()
+    if self.host_override:
+      logging.info("Server: %s; Host: %s", self.host, self.host_override)
+    else:
+      logging.info("Server: %s", self.host)
+
+  def _GetOpener(self):
+    """Returns an OpenerDirector for making HTTP requests.
+
+    Returns:
+      A urllib2.OpenerDirector object.
+    """
+    raise NotImplementedError()
+
+  def _CreateRequest(self, url, data=None):
+    """Creates a new urllib request."""
+    logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
+    req = urllib2.Request(url, data=data)
+    if self.host_override:
+      req.add_header("Host", self.host_override)
+    for key, value in self.extra_headers.iteritems():
+      req.add_header(key, value)
+    return req
+
+  def _GetAuthToken(self, email, password):
+    """Uses ClientLogin to authenticate the user, returning an auth token.
+
+    Args:
+      email:    The user's email address
+      password: The user's password
+
+    Raises:
+      ClientLoginError: If there was an error authenticating with ClientLogin.
+      HTTPError: If there was some other form of HTTP error.
+
+    Returns:
+      The authentication token returned by ClientLogin.
+    """
+    account_type = "GOOGLE"
+    if self.host.endswith(".google.com"):
+      # Needed for use inside Google.
+      account_type = "HOSTED"
+    req = self._CreateRequest(
+        url="https://www.google.com/accounts/ClientLogin",
+        data=urllib.urlencode({
+            "Email": email,
+            "Passwd": password,
+            "service": "ah",
+            "source": "rietveld-codereview-upload",
+            "accountType": account_type,
+        }),
+    )
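+    # For reference (illustrative values): a successful ClientLogin response
+    # body is a series of newline-separated key=value lines, e.g.
+    #   SID=DQAAAGgA...
+    #   LSID=DQAAAGsA...
+    #   Auth=DQAAAGgA...
+    # Only the "Auth" value is used below.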
+    try:
+      response = self.opener.open(req)
+      response_body = response.read()
+      response_dict = dict(x.split("=")
+                           for x in response_body.split("\n") if x)
+      return response_dict["Auth"]
+    except urllib2.HTTPError, e:
+      if e.code == 403:
+        body = e.read()
+        response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
+        raise ClientLoginError(req.get_full_url(), e.code, e.msg,
+                               e.headers, response_dict)
+      else:
+        raise
+
+  def _GetAuthCookie(self, auth_token):
+    """Fetches authentication cookies for an authentication token.
+
+    Args:
+      auth_token: The authentication token returned by ClientLogin.
+
+    Raises:
+      HTTPError: If there was an error fetching the authentication cookies.
+    """
+    # This is a dummy value to allow us to identify when we're successful.
+    continue_location = "http://localhost/"
+    args = {"continue": continue_location, "auth": auth_token}
+    req = self._CreateRequest("http://%s/_ah/login?%s" %
+                              (self.host, urllib.urlencode(args)))
+    try:
+      response = self.opener.open(req)
+    except urllib2.HTTPError, e:
+      response = e
+    if (response.code != 302 or
+        response.info()["location"] != continue_location):
+      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
+                              response.headers, response.fp)
+    self.authenticated = True
+
+  def _Authenticate(self):
+    """Authenticates the user.
+
+    The authentication process works as follows:
+     1) We get a username and password from the user
+     2) We use ClientLogin to obtain an AUTH token for the user
+        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
+     3) We pass the auth token to /_ah/login on the server to obtain an
+        authentication cookie. If login was successful, it tries to redirect
+        us to the URL we provided.
+
+    If we attempt to access the upload API without first obtaining an
+    authentication cookie, it returns a 401 response and directs us to
+    authenticate ourselves with ClientLogin.
+    """
+    for i in range(3):
+      credentials = self.auth_function()
+      try:
+        auth_token = self._GetAuthToken(credentials[0], credentials[1])
+      except ClientLoginError, e:
+        if e.reason == "BadAuthentication":
+          print >>sys.stderr, "Invalid username or password."
+          continue
+        if e.reason == "CaptchaRequired":
+          print >>sys.stderr, (
+              "Please go to\n"
+              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
+              "and verify you are a human.  Then try again.")
+          break
+        if e.reason == "NotVerified":
+          print >>sys.stderr, "Account not verified."
+          break
+        if e.reason == "TermsNotAgreed":
+          print >>sys.stderr, "User has not agreed to TOS."
+          break
+        if e.reason == "AccountDeleted":
+          print >>sys.stderr, "The user account has been deleted."
+          break
+        if e.reason == "AccountDisabled":
+          print >>sys.stderr, "The user account has been disabled."
+          break
+        if e.reason == "ServiceDisabled":
+          print >>sys.stderr, ("The user's access to the service has been "
+                               "disabled.")
+          break
+        if e.reason == "ServiceUnavailable":
+          print >>sys.stderr, "The service is not available; try again later."
+          break
+        raise
+      self._GetAuthCookie(auth_token)
+      return
+
+  def Send(self, request_path, payload=None,
+           content_type="application/octet-stream",
+           timeout=None,
+           **kwargs):
+    """Sends an RPC and returns the response.
+
+    Args:
+      request_path: The path to send the request to, e.g. /api/appversion/create.
+      payload: The body of the request, or None to send an empty request.
+      content_type: The Content-Type header to use.
+      timeout: timeout in seconds; default None i.e. no timeout.
+        (Note: for large requests on OS X, the timeout doesn't work right.)
+      kwargs: Any keyword arguments are converted into query string parameters.
+
+    Returns:
+      The response body, as a string.
+    """
+    # TODO: Don't require authentication.  Let the server say
+    # whether it is necessary.
+    if not self.authenticated:
+      self._Authenticate()
+
+    old_timeout = socket.getdefaulttimeout()
+    socket.setdefaulttimeout(timeout)
+    try:
+      tries = 0
+      while True:
+        tries += 1
+        args = dict(kwargs)
+        url = "http://%s%s" % (self.host, request_path)
+        if args:
+          url += "?" + urllib.urlencode(args)
+        req = self._CreateRequest(url=url, data=payload)
+        req.add_header("Content-Type", content_type)
+        try:
+          f = self.opener.open(req)
+          response = f.read()
+          f.close()
+          return response
+        except urllib2.HTTPError, e:
+          if tries > 3:
+            raise
+          elif e.code == 401:
+            self._Authenticate()
+##           elif e.code >= 500 and e.code < 600:
+##             # Server Error - try again.
+##             continue
+          else:
+            raise
+    finally:
+      socket.setdefaulttimeout(old_timeout)
+
+
+class HttpRpcServer(AbstractRpcServer):
+  """Provides a simplified RPC-style interface for HTTP requests."""
+
+  def _Authenticate(self):
+    """Save the cookie jar after authentication."""
+    super(HttpRpcServer, self)._Authenticate()
+    if self.save_cookies:
+      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
+      self.cookie_jar.save()
+
+  def _GetOpener(self):
+    """Returns an OpenerDirector that supports cookies and ignores redirects.
+
+    Returns:
+      A urllib2.OpenerDirector object.
+    """
+    opener = urllib2.OpenerDirector()
+    opener.add_handler(urllib2.ProxyHandler())
+    opener.add_handler(urllib2.UnknownHandler())
+    opener.add_handler(urllib2.HTTPHandler())
+    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
+    opener.add_handler(urllib2.HTTPSHandler())
+    opener.add_handler(urllib2.HTTPErrorProcessor())
+    if self.save_cookies:
+      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
+      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
+      if os.path.exists(self.cookie_file):
+        try:
+          self.cookie_jar.load()
+          self.authenticated = True
+          StatusUpdate("Loaded authentication cookies from %s" %
+                       self.cookie_file)
+        except (cookielib.LoadError, IOError):
+          # Failed to load cookies - just ignore them.
+          pass
+      else:
+        # Create an empty cookie file with mode 600
+        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
+        os.close(fd)
+      # Always chmod the cookie file
+      os.chmod(self.cookie_file, 0600)
+    else:
+      # Don't save cookies across runs of update.py.
+      self.cookie_jar = cookielib.CookieJar()
+    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
+    return opener
+
+
+parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
+parser.add_option("-y", "--assume_yes", action="store_true",
+                  dest="assume_yes", default=False,
+                  help="Assume that the answer to yes/no questions is 'yes'.")
+# Logging
+group = parser.add_option_group("Logging options")
+group.add_option("-q", "--quiet", action="store_const", const=0,
+                 dest="verbose", help="Print errors only.")
+group.add_option("-v", "--verbose", action="store_const", const=2,
+                 dest="verbose", default=1,
+                 help="Print info level logs (default).")
+group.add_option("--noisy", action="store_const", const=3,
+                 dest="verbose", help="Print all logs.")
+# Review server
+group = parser.add_option_group("Review server options")
+group.add_option("-s", "--server", action="store", dest="server",
+                 default="codereview.appspot.com",
+                 metavar="SERVER",
+                 help=("The server to upload to. The format is host[:port]. "
+                       "Defaults to 'codereview.appspot.com'."))
+group.add_option("-e", "--email", action="store", dest="email",
+                 metavar="EMAIL", default=None,
+                 help="The username to use. Will prompt if omitted.")
+group.add_option("-H", "--host", action="store", dest="host",
+                 metavar="HOST", default=None,
+                 help="Overrides the Host header sent with all RPCs.")
+group.add_option("--no_cookies", action="store_false",
+                 dest="save_cookies", default=True,
+                 help="Do not save authentication cookies to local disk.")
+# Issue
+group = parser.add_option_group("Issue options")
+group.add_option("-d", "--description", action="store", dest="description",
+                 metavar="DESCRIPTION", default=None,
+                 help="Optional description when creating an issue.")
+group.add_option("-f", "--description_file", action="store",
+                 dest="description_file", metavar="DESCRIPTION_FILE",
+                 default=None,
+                 help="Optional path of a file that contains "
+                      "the description when creating an issue.")
+group.add_option("-r", "--reviewers", action="store", dest="reviewers",
+                 metavar="REVIEWERS", default=None,
+                 help="Add reviewers (comma separated email addresses).")
+group.add_option("--cc", action="store", dest="cc",
+                 metavar="CC", default=None,
+                 help="Add CC (comma separated email addresses).")
+# Upload options
+group = parser.add_option_group("Patch options")
+group.add_option("-m", "--message", action="store", dest="message",
+                 metavar="MESSAGE", default=None,
+                 help="A message to identify the patch. "
+                      "Will prompt if omitted.")
+group.add_option("-i", "--issue", type="int", action="store",
+                 metavar="ISSUE", default=None,
+                 help="Issue number to which to add. Defaults to new issue.")
+group.add_option("--download_base", action="store_true",
+                 dest="download_base", default=False,
+                 help="Base files will be downloaded by the server "
+                 "(side-by-side diffs may not work on files with CRs).")
+group.add_option("--rev", action="store", dest="revision",
+                 metavar="REV", default=None,
+                 help="Branch/tree/revision to diff against (used by DVCS).")
+group.add_option("--send_mail", action="store_true",
+                 dest="send_mail", default=False,
+                 help="Send notification email to reviewers.")
+
+
+def GetRpcServer(options):
+  """Returns an instance of an AbstractRpcServer.
+
+  Returns:
+    A new AbstractRpcServer, on which RPC calls can be made.
+  """
+
+  rpc_server_class = HttpRpcServer
+
+  def GetUserCredentials():
+    """Prompts the user for a username and password."""
+    email = options.email
+    if email is None:
+      email = GetEmail("Email (login for uploading to %s)" % options.server)
+    password = getpass.getpass("Password for %s: " % email)
+    return (email, password)
+
+  # If this is the dev_appserver, use fake authentication.
+  host = (options.host or options.server).lower()
+  if host == "localhost" or host.startswith("localhost:"):
+    email = options.email
+    if email is None:
+      email = "test@example.com"
+      logging.info("Using debug user %s.  Override with --email" % email)
+    server = rpc_server_class(
+        options.server,
+        lambda: (email, "password"),
+        host_override=options.host,
+        extra_headers={"Cookie":
+                       'dev_appserver_login="%s:False"' % email},
+        save_cookies=options.save_cookies)
+    # Don't try to talk to ClientLogin.
+    server.authenticated = True
+    return server
+
+  return rpc_server_class(options.server, GetUserCredentials,
+                          host_override=options.host,
+                          save_cookies=options.save_cookies)
+
+
+def EncodeMultipartFormData(fields, files):
+  """Encode form fields for multipart/form-data.
+
+  Args:
+    fields: A sequence of (name, value) elements for regular form fields.
+    files: A sequence of (name, filename, value) elements for data to be
+           uploaded as files.
+  Returns:
+    (content_type, body) ready for httplib.HTTP instance.
+
+  Source:
+    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
+  """
+  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+  CRLF = '\r\n'
+  lines = []
+  for (key, value) in fields:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"' % key)
+    lines.append('')
+    lines.append(value)
+  for (key, filename, value) in files:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
+             (key, filename))
+    lines.append('Content-Type: %s' % GetContentType(filename))
+    lines.append('')
+    lines.append(value)
+  lines.append('--' + BOUNDARY + '--')
+  lines.append('')
+  body = CRLF.join(lines)
+  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+  return content_type, body
+
+
+def GetContentType(filename):
+  """Helper to guess the content-type from the filename."""
+  return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# Use a shell for subcommands on Windows to get a PATH search.
+use_shell = sys.platform.startswith("win")
+
+def RunShellWithReturnCode(command, print_output=False,
+                           universal_newlines=True):
+  """Executes a command and returns the output from stdout and the return code.
+
+  Args:
+    command: Command to execute.
+    print_output: If True, the output is printed to stdout.
+                  If False, both stdout and stderr are ignored.
+    universal_newlines: Use universal_newlines flag (default: True).
+
+  Returns:
+    Tuple (output, return code)
+  """
+  logging.info("Running %s", command)
+  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                       shell=use_shell, universal_newlines=universal_newlines)
+  if print_output:
+    output_array = []
+    while True:
+      line = p.stdout.readline()
+      if not line:
+        break
+      print line.strip("\n")
+      output_array.append(line)
+    output = "".join(output_array)
+  else:
+    output = p.stdout.read()
+  p.wait()
+  errout = p.stderr.read()
+  if print_output and errout:
+    print >>sys.stderr, errout
+  p.stdout.close()
+  p.stderr.close()
+  return output, p.returncode
+
+
+def RunShell(command, silent_ok=False, universal_newlines=True,
+             print_output=False):
+  data, retcode = RunShellWithReturnCode(command, print_output,
+                                         universal_newlines)
+  if retcode:
+    ErrorExit("Got error status from %s:\n%s" % (command, data))
+  if not silent_ok and not data:
+    ErrorExit("No output from %s" % command)
+  return data
+
+
+class VersionControlSystem(object):
+  """Abstract base class providing an interface to the VCS."""
+
+  def __init__(self, options):
+    """Constructor.
+
+    Args:
+      options: Command line options.
+    """
+    self.options = options
+
+  def GenerateDiff(self, args):
+    """Return the current diff as a string.
+
+    Args:
+      args: Extra arguments to pass to the diff command.
+    """
+    raise NotImplementedError(
+        "abstract method -- subclass %s must override" % self.__class__)
+
+  def GetUnknownFiles(self):
+    """Return a list of files unknown to the VCS."""
+    raise NotImplementedError(
+        "abstract method -- subclass %s must override" % self.__class__)
+
+  def CheckForUnknownFiles(self):
+    """Show an "are you sure?" prompt if there are unknown files."""
+    unknown_files = self.GetUnknownFiles()
+    if unknown_files:
+      print "The following files are not added to version control:"
+      for line in unknown_files:
+        print line
+      prompt = "Are you sure to continue?(y/N) "
+      answer = raw_input(prompt).strip()
+      if answer != "y":
+        ErrorExit("User aborted")
+
+  def GetBaseFile(self, filename):
+    """Get the content of the upstream version of a file.
+
+    Returns:
+      A tuple (base_content, new_content, is_binary, status)
+        base_content: The contents of the base file.
+        new_content: For text files, this is empty.  For binary files, this is
+          the contents of the new file, since the diff output won't contain
+          information to reconstruct the current file.
+        is_binary: True iff the file is binary.
+        status: The status of the file.
+    """
+
+    raise NotImplementedError(
+        "abstract method -- subclass %s must override" % self.__class__)
+
+
+  def GetBaseFiles(self, diff):
+    """Helper that calls GetBase file for each file in the patch.
+
+    Returns:
+      A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
+      are retrieved based on lines that start with "Index:" or
+      "Property changes on:".
+    """
+    files = {}
+    for line in diff.splitlines(True):
+      if line.startswith('Index:') or line.startswith('Property changes on:'):
+        unused, filename = line.split(':', 1)
+        # On Windows if a file has property changes its filename uses '\'
+        # instead of '/'.
+        filename = filename.strip().replace('\\', '/')
+        files[filename] = self.GetBaseFile(filename)
+    return files
+
+
+  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
+                      files):
+    """Uploads the base files (and if necessary, the current ones as well)."""
+
+    def UploadFile(filename, file_id, content, is_binary, status, is_base):
+      """Uploads a file to the server."""
+      file_too_large = False
+      if is_base:
+        type = "base"
+      else:
+        type = "current"
+      if len(content) > MAX_UPLOAD_SIZE:
+        print ("Not uploading the %s file for %s because it's too large." %
+               (type, filename))
+        file_too_large = True
+        content = ""
+      checksum = md5.new(content).hexdigest()
+      if options.verbose > 0 and not file_too_large:
+        print "Uploading %s file for %s" % (type, filename)
+      url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
+      form_fields = [("filename", filename),
+                     ("status", status),
+                     ("checksum", checksum),
+                     ("is_binary", str(is_binary)),
+                     ("is_current", str(not is_base)),
+                    ]
+      if file_too_large:
+        form_fields.append(("file_too_large", "1"))
+      if options.email:
+        form_fields.append(("user", options.email))
+      ctype, body = EncodeMultipartFormData(form_fields,
+                                            [("data", filename, content)])
+      response_body = rpc_server.Send(url, body,
+                                      content_type=ctype)
+      if not response_body.startswith("OK"):
+        StatusUpdate("  --> %s" % response_body)
+        sys.exit(1)
+
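+    # patch_list holds [patch id, filename] pairs; invert it here into a map
+    # from filename to the patch id string used in the upload URL below.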
+    patches = dict()
+    [patches.setdefault(v, k) for k, v in patch_list]
+    for filename in patches.keys():
+      base_content, new_content, is_binary, status = files[filename]
+      file_id_str = patches.get(filename)
+      if file_id_str.find("nobase") != -1:
+        base_content = None
+        file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
+      file_id = int(file_id_str)
+      if base_content != None:
+        UploadFile(filename, file_id, base_content, is_binary, status, True)
+      if new_content != None:
+        UploadFile(filename, file_id, new_content, is_binary, status, False)
+
+  def IsImage(self, filename):
+    """Returns true if the filename has an image extension."""
+    mimetype = mimetypes.guess_type(filename)[0]
+    if not mimetype:
+      return False
+    return mimetype.startswith("image/")
+
+
+class SubversionVCS(VersionControlSystem):
+  """Implementation of the VersionControlSystem interface for Subversion."""
+
+  def __init__(self, options):
+    super(SubversionVCS, self).__init__(options)
+    if self.options.revision:
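+      # Illustrative: "--rev 123:456" gives rev_start "123" and rev_end
+      # "456"; a plain "--rev 123" leaves rev_end as None.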
+      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
+      if not match:
+        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
+      self.rev_start = match.group(1)
+      self.rev_end = match.group(3)
+    else:
+      self.rev_start = self.rev_end = None
+    # Cache output from "svn list -r REVNO dirname".
+    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
+    self.svnls_cache = {}
+    # The SVN base URL is required to fetch files deleted in an older
+    # revision.  The result is cached so we don't have to guess it over and
+    # over again in GetBaseFile().
+    required = self.options.download_base or self.options.revision is not None
+    self.svn_base = self._GuessBase(required)
+
+  def GuessBase(self, required):
+    """Returns the cached SVN base URL computed by _GuessBase() in __init__."""
+    return self.svn_base
+
+  def _GuessBase(self, required):
+    """Returns the SVN base URL.
+
+    Args:
+      required: If True, exit when the URL can't be guessed; if False,
+        return None in that case.
+    """
+    info = RunShell(["svn", "info"])
+    for line in info.splitlines():
+      words = line.split()
+      if len(words) == 2 and words[0] == "URL:":
+        url = words[1]
+        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
+        username, netloc = urllib.splituser(netloc)
+        if username:
+          logging.info("Removed username from base URL")
+        if netloc.endswith("svn.python.org"):
+          if netloc == "svn.python.org":
+            if path.startswith("/projects/"):
+              path = path[9:]
+          elif netloc != "pythondev@svn.python.org":
+            ErrorExit("Unrecognized Python URL: %s" % url)
+          base = "http://svn.python.org/view/*checkout*%s/" % path
+          logging.info("Guessed Python base = %s", base)
+        elif netloc.endswith("svn.collab.net"):
+          if path.startswith("/repos/"):
+            path = path[6:]
+          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
+          logging.info("Guessed CollabNet base = %s", base)
+        elif netloc.endswith(".googlecode.com"):
+          path = path + "/"
+          base = urlparse.urlunparse(("http", netloc, path, params,
+                                      query, fragment))
+          logging.info("Guessed Google Code base = %s", base)
+        else:
+          path = path + "/"
+          base = urlparse.urlunparse((scheme, netloc, path, params,
+                                      query, fragment))
+          logging.info("Guessed base = %s", base)
+        return base
+    if required:
+      ErrorExit("Can't find URL in output from svn info")
+    return None
+
+  def GenerateDiff(self, args):
+    cmd = ["svn", "diff"]
+    if self.options.revision:
+      cmd += ["-r", self.options.revision]
+    cmd.extend(args)
+    data = RunShell(cmd)
+    count = 0
+    for line in data.splitlines():
+      if line.startswith("Index:") or line.startswith("Property changes on:"):
+        count += 1
+        logging.info(line)
+    if not count:
+      ErrorExit("No valid patches found in output from svn diff")
+    return data
+
+  def _CollapseKeywords(self, content, keyword_str):
+    """Collapses SVN keywords."""
+    # svn cat translates keywords but svn diff doesn't. As a result of this
+    # behavior patching.PatchChunks() fails with a chunk mismatch error.
+    # This part was originally written by the Review Board development team
+    # who had the same problem (http://reviews.review-board.org/r/276/).
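+    # Illustrative effect: an expanded keyword such as
+    #   $Id: foo.cc 123 2009-05-01 someone $
+    # is collapsed back to
+    #   $Id$
+    # so the base content matches what "svn diff" produces.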
+    # Mapping of keywords to known aliases
+    svn_keywords = {
+      # Standard keywords
+      'Date':                ['Date', 'LastChangedDate'],
+      'Revision':            ['Revision', 'LastChangedRevision', 'Rev'],
+      'Author':              ['Author', 'LastChangedBy'],
+      'HeadURL':             ['HeadURL', 'URL'],
+      'Id':                  ['Id'],
+
+      # Aliases
+      'LastChangedDate':     ['LastChangedDate', 'Date'],
+      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
+      'LastChangedBy':       ['LastChangedBy', 'Author'],
+      'URL':                 ['URL', 'HeadURL'],
+    }
+
+    def repl(m):
+      if m.group(2):
+        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
+      return "$%s$" % m.group(1)
+    keywords = [keyword
+                for name in keyword_str.split(" ")
+                for keyword in svn_keywords.get(name, [])]
+    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
+
+  def GetUnknownFiles(self):
+    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
+    unknown_files = []
+    for line in status.split("\n"):
+      if line and line[0] == "?":
+        unknown_files.append(line)
+    return unknown_files
+
+  def ReadFile(self, filename):
+    """Returns the contents of a file."""
+    file = open(filename, 'rb')
+    result = ""
+    try:
+      result = file.read()
+    finally:
+      file.close()
+    return result
+
+  def GetStatus(self, filename):
+    """Returns the status of a file."""
+    if not self.options.revision:
+      status = RunShell(["svn", "status", "--ignore-externals", filename])
+      if not status:
+        ErrorExit("svn status returned no output for %s" % filename)
+      status_lines = status.splitlines()
+      # If file is in a cl, the output will begin with
+      # "\n--- Changelist 'cl_name':\n".  See
+      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
+      if (len(status_lines) == 3 and
+          not status_lines[0] and
+          status_lines[1].startswith("--- Changelist")):
+        status = status_lines[2]
+      else:
+        status = status_lines[0]
+    # If we have a revision to diff against we need to run "svn list"
+    # for the old and the new revision and compare the results to get
+    # the correct status for a file.
+    else:
+      dirname, relfilename = os.path.split(filename)
+      if dirname not in self.svnls_cache:
+        cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
+        out, returncode = RunShellWithReturnCode(cmd)
+        if returncode:
+          ErrorExit("Failed to get status for %s." % filename)
+        old_files = out.splitlines()
+        args = ["svn", "list"]
+        if self.rev_end:
+          args += ["-r", self.rev_end]
+        cmd = args + [dirname or "."]
+        out, returncode = RunShellWithReturnCode(cmd)
+        if returncode:
+          ErrorExit("Failed to run command %s" % cmd)
+        self.svnls_cache[dirname] = (old_files, out.splitlines())
+      old_files, new_files = self.svnls_cache[dirname]
+      if relfilename in old_files and relfilename not in new_files:
+        status = "D   "
+      elif relfilename in old_files and relfilename in new_files:
+        status = "M   "
+      else:
+        status = "A   "
+    return status
+
+  def GetBaseFile(self, filename):
+    status = self.GetStatus(filename)
+    base_content = None
+    new_content = None
+
+    # If a file is copied its status will be "A  +", which signifies
+    # "addition-with-history".  See "svn st" for more information.  We need to
+    # upload the original file or else diff parsing will fail if the file was
+    # edited.
+    if status[0] == "A" and status[3] != "+":
+      # We'll need to upload the new content if we're adding a binary file
+      # since diff's output won't contain it.
+      mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
+                          silent_ok=True)
+      base_content = ""
+      is_binary = mimetype and not mimetype.startswith("text/")
+      if is_binary and self.IsImage(filename):
+        new_content = self.ReadFile(filename)
+    elif (status[0] in ("M", "D", "R") or
+          (status[0] == "A" and status[3] == "+") or  # Copied file.
+          (status[0] == " " and status[1] == "M")):  # Property change.
+      args = []
+      if self.options.revision:
+        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
+      else:
+        # Don't change filename, it's needed later.
+        url = filename
+        args += ["-r", "BASE"]
+      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
+      mimetype, returncode = RunShellWithReturnCode(cmd)
+      if returncode:
+        # File does not exist in the requested revision.
+        # Reset mimetype, it contains an error message.
+        mimetype = ""
+      get_base = False
+      is_binary = mimetype and not mimetype.startswith("text/")
+      if status[0] == " ":
+        # Empty base content just to force an upload.
+        base_content = ""
+      elif is_binary:
+        if self.IsImage(filename):
+          get_base = True
+          if status[0] == "M":
+            if not self.rev_end:
+              new_content = self.ReadFile(filename)
+            else:
+              url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
+              new_content = RunShell(["svn", "cat", url],
+                                     universal_newlines=True, silent_ok=True)
+        else:
+          base_content = ""
+      else:
+        get_base = True
+
+      if get_base:
+        if is_binary:
+          universal_newlines = False
+        else:
+          universal_newlines = True
+        if self.rev_start:
+          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
+          # the full URL with "@REV" appended instead of using "-r" option.
+          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
+          base_content = RunShell(["svn", "cat", url],
+                                  universal_newlines=universal_newlines,
+                                  silent_ok=True)
+        else:
+          base_content = RunShell(["svn", "cat", filename],
+                                  universal_newlines=universal_newlines,
+                                  silent_ok=True)
+        if not is_binary:
+          args = []
+          if self.rev_start:
+            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
+          else:
+            url = filename
+            args += ["-r", "BASE"]
+          cmd = ["svn"] + args + ["propget", "svn:keywords", url]
+          keywords, returncode = RunShellWithReturnCode(cmd)
+          if keywords and not returncode:
+            base_content = self._CollapseKeywords(base_content, keywords)
+    else:
+      StatusUpdate("svn status returned unexpected output: %s" % status)
+      sys.exit(1)
+    return base_content, new_content, is_binary, status[0:5]
+
+
+class GitVCS(VersionControlSystem):
+  """Implementation of the VersionControlSystem interface for Git."""
+
+  def __init__(self, options):
+    super(GitVCS, self).__init__(options)
+    # Map of filename -> hash of base file.
+    self.base_hashes = {}
+
+  def GenerateDiff(self, extra_args):
+    # This is more complicated than svn's GenerateDiff because we must convert
+    # the diff output to include an svn-style "Index:" line as well as record
+    # the hashes of the base files, so we can upload them along with our diff.
+    if self.options.revision:
+      extra_args = [self.options.revision] + extra_args
+    gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
+    svndiff = []
+    filecount = 0
+    filename = None
+    for line in gitdiff.splitlines():
+      match = re.match(r"diff --git a/(.*) b/.*$", line)
+      if match:
+        filecount += 1
+        filename = match.group(1)
+        svndiff.append("Index: %s\n" % filename)
+      else:
+        # The "index" line in a git diff looks like this (long hashes elided):
+        #   index 82c0d44..b2cee3f 100755
+        # We want to save the left hash, as that identifies the base file.
+        match = re.match(r"index (\w+)\.\.", line)
+        if match:
+          self.base_hashes[filename] = match.group(1)
+      svndiff.append(line + "\n")
+    if not filecount:
+      ErrorExit("No valid patches found in output from git diff")
+    return "".join(svndiff)
+
+  def GetUnknownFiles(self):
+    status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
+                      silent_ok=True)
+    return status.splitlines()
+
+  def GetBaseFile(self, filename):
+    hash = self.base_hashes[filename]
+    base_content = None
+    new_content = None
+    is_binary = False
+    if hash == "0" * 40:  # All-zero hash indicates no base file.
+      status = "A"
+      base_content = ""
+    else:
+      status = "M"
+      base_content, returncode = RunShellWithReturnCode(["git", "show", hash])
+      if returncode:
+        ErrorExit("Got error status from 'git show %s'" % hash)
+    return (base_content, new_content, is_binary, status)
+
+
+class MercurialVCS(VersionControlSystem):
+  """Implementation of the VersionControlSystem interface for Mercurial."""
+
+  def __init__(self, options, repo_dir):
+    super(MercurialVCS, self).__init__(options)
+    # Absolute path to repository (we can be in a subdir)
+    self.repo_dir = os.path.normpath(repo_dir)
+    # Compute the subdir
+    cwd = os.path.normpath(os.getcwd())
+    assert cwd.startswith(self.repo_dir)
+    self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
+    if self.options.revision:
+      self.base_rev = self.options.revision
+    else:
+      self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
+
+  def _GetRelPath(self, filename):
+    """Get relative path of a file according to the current directory,
+    given its logical path in the repo."""
+    assert filename.startswith(self.subdir), filename
+    return filename[len(self.subdir):].lstrip(r"\/")
+
+  def GenerateDiff(self, extra_args):
+    # If no file specified, restrict to the current subdir
+    extra_args = extra_args or ["."]
+    cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
+    data = RunShell(cmd, silent_ok=True)
+    svndiff = []
+    filecount = 0
+    for line in data.splitlines():
+      m = re.match("diff --git a/(\S+) b/(\S+)", line)
+      if m:
+        # Modify the line so it looks like it comes from svn diff.
+        # With this modification no changes on the server side are required
+        # to make upload.py work with Mercurial repos.
+        # NOTE: for proper handling of moved/copied files, we have to use
+        # the second filename.
+        filename = m.group(2)
+        svndiff.append("Index: %s" % filename)
+        svndiff.append("=" * 67)
+        filecount += 1
+        logging.info(line)
+      else:
+        svndiff.append(line)
+    if not filecount:
+      ErrorExit("No valid patches found in output from hg diff")
+    return "\n".join(svndiff) + "\n"
+
+  def GetUnknownFiles(self):
+    """Return a list of files unknown to the VCS."""
+    args = []
+    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
+        silent_ok=True)
+    unknown_files = []
+    for line in status.splitlines():
+      st, fn = line.split(" ", 1)
+      if st == "?":
+        unknown_files.append(fn)
+    return unknown_files
+
+  def GetBaseFile(self, filename):
+    # "hg status" and "hg cat" both take a path relative to the current subdir
+    # rather than to the repo root, but "hg diff" has given us the full path
+    # to the repo root.
+    base_content = ""
+    new_content = None
+    is_binary = False
+    oldrelpath = relpath = self._GetRelPath(filename)
+    # "hg status -C" returns two lines for moved/copied files, one otherwise
+    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
+    out = out.splitlines()
+    # HACK: strip error message about missing file/directory if it isn't in
+    # the working copy
+    if out[0].startswith('%s: ' % relpath):
+      out = out[1:]
+    if len(out) > 1:
+      # Moved/copied => considered as modified, use old filename to
+      # retrieve base contents
+      oldrelpath = out[1].strip()
+      status = "M"
+    else:
+      status, _ = out[0].split(' ', 1)
+    if status != "A":
+      base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
+        silent_ok=True)
+      is_binary = "\0" in base_content  # Mercurial's heuristic
+    if status != "R":
+      new_content = open(relpath, "rb").read()
+      is_binary = is_binary or "\0" in new_content
+    if is_binary and base_content:
+      # Fetch again without converting newlines
+      base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
+        silent_ok=True, universal_newlines=False)
+    if not is_binary or not self.IsImage(relpath):
+      new_content = None
+    return base_content, new_content, is_binary, status
+
+
+# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
+def SplitPatch(data):
+  """Splits a patch into separate pieces for each file.
+
+  Args:
+    data: A string containing the output of svn diff.
+
+  Returns:
+    A list of 2-tuple (filename, text) where text is the svn diff output
+      pertaining to filename.
+  """
+  patches = []
+  filename = None
+  diff = []
+  for line in data.splitlines(True):
+    new_filename = None
+    if line.startswith('Index:'):
+      unused, new_filename = line.split(':', 1)
+      new_filename = new_filename.strip()
+    elif line.startswith('Property changes on:'):
+      unused, temp_filename = line.split(':', 1)
+      # When a file is modified, paths use '/' between directories; however,
+      # when a property is modified, '\' is used on Windows.  Make them the
+      # same, otherwise the file shows up twice.
+      temp_filename = temp_filename.strip().replace('\\', '/')
+      if temp_filename != filename:
+        # File has property changes but no modifications, create a new diff.
+        new_filename = temp_filename
+    if new_filename:
+      if filename and diff:
+        patches.append((filename, ''.join(diff)))
+      filename = new_filename
+      diff = [line]
+      continue
+    if diff is not None:
+      diff.append(line)
+  if filename and diff:
+    patches.append((filename, ''.join(diff)))
+  return patches
+
+
+def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
+  """Uploads a separate patch for each file in the diff output.
+
+  Returns a list of [patch_key, filename] for each file.
+  """
+  patches = SplitPatch(data)
+  rv = []
+  for patch in patches:
+    if len(patch[1]) > MAX_UPLOAD_SIZE:
+      print ("Not uploading the patch for " + patch[0] +
+             " because the file is too large.")
+      continue
+    form_fields = [("filename", patch[0])]
+    if not options.download_base:
+      form_fields.append(("content_upload", "1"))
+    files = [("data", "data.diff", patch[1])]
+    ctype, body = EncodeMultipartFormData(form_fields, files)
+    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
+    print "Uploading patch for " + patch[0]
+    response_body = rpc_server.Send(url, body, content_type=ctype)
+    lines = response_body.splitlines()
+    if not lines or lines[0] != "OK":
+      StatusUpdate("  --> %s" % response_body)
+      sys.exit(1)
+    rv.append([lines[1], patch[0]])
+  return rv
+
+
+def GuessVCS(options):
+  """Helper to guess the version control system.
+
+  This examines the current directory, guesses which VersionControlSystem
+  we're using, and returns an instance of the appropriate class.  Exit with an
+  error if we can't figure it out.
+
+  Returns:
+    A VersionControlSystem instance. Exits if the VCS can't be guessed.
+  """
+  # Mercurial has a command to get the base directory of a repository
+  # Try running it, but don't die if we don't have hg installed.
+  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
+  try:
+    out, returncode = RunShellWithReturnCode(["hg", "root"])
+    if returncode == 0:
+      return MercurialVCS(options, out.strip())
+  except OSError, (errno, message):
+    if errno != 2:  # ENOENT -- they don't have hg installed.
+      raise
+
+  # Subversion has a .svn in all working directories.
+  if os.path.isdir('.svn'):
+    logging.info("Guessed VCS = Subversion")
+    return SubversionVCS(options)
+
+  # Git has a command to test if you're in a git tree.
+  # Try running it, but don't die if we don't have git installed.
+  try:
+    out, returncode = RunShellWithReturnCode(["git", "rev-parse",
+                                              "--is-inside-work-tree"])
+    if returncode == 0:
+      return GitVCS(options)
+  except OSError, (errno, message):
+    if errno != 2:  # ENOENT -- they don't have git installed.
+      raise
+
+  ErrorExit(("Could not guess version control system. "
+             "Are you in a working copy directory?"))
+
+
+def RealMain(argv, data=None):
+  """The real main function.
+
+  Args:
+    argv: Command line arguments.
+    data: Diff contents. If None (default) the diff is generated by
+      the VersionControlSystem implementation returned by GuessVCS().
+
+  Returns:
+    A 2-tuple (issue id, patchset id).
+    The patchset id is None if the base files are not uploaded by this
+    script (applies only to SVN checkouts).
+  """
+  logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
+                              "%(lineno)s %(message)s "))
+  os.environ['LC_ALL'] = 'C'
+  options, args = parser.parse_args(argv[1:])
+  global verbosity
+  verbosity = options.verbose
+  if verbosity >= 3:
+    logging.getLogger().setLevel(logging.DEBUG)
+  elif verbosity >= 2:
+    logging.getLogger().setLevel(logging.INFO)
+  vcs = GuessVCS(options)
+  if isinstance(vcs, SubversionVCS):
+    # base field is only allowed for Subversion.
+    # Note: Fetching base files may become deprecated in future releases.
+    base = vcs.GuessBase(options.download_base)
+  else:
+    base = None
+  if not base and options.download_base:
+    options.download_base = True
+    logging.info("Enabled upload of base file")
+  if not options.assume_yes:
+    vcs.CheckForUnknownFiles()
+  if data is None:
+    data = vcs.GenerateDiff(args)
+  files = vcs.GetBaseFiles(data)
+  if verbosity >= 1:
+    print "Upload server:", options.server, "(change with -s/--server)"
+  if options.issue:
+    prompt = "Message describing this patch set: "
+  else:
+    prompt = "New issue subject: "
+  message = options.message or raw_input(prompt).strip()
+  if not message:
+    ErrorExit("A non-empty message is required")
+  rpc_server = GetRpcServer(options)
+  form_fields = [("subject", message)]
+  if base:
+    form_fields.append(("base", base))
+  if options.issue:
+    form_fields.append(("issue", str(options.issue)))
+  if options.email:
+    form_fields.append(("user", options.email))
+  if options.reviewers:
+    for reviewer in options.reviewers.split(','):
+      if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
+        ErrorExit("Invalid email address: %s" % reviewer)
+    form_fields.append(("reviewers", options.reviewers))
+  if options.cc:
+    for cc in options.cc.split(','):
+      if "@" in cc and not cc.split("@")[1].count(".") == 1:
+        ErrorExit("Invalid email address: %s" % cc)
+    form_fields.append(("cc", options.cc))
+  description = options.description
+  if options.description_file:
+    if options.description:
+      ErrorExit("Can't specify description and description_file")
+    file = open(options.description_file, 'r')
+    description = file.read()
+    file.close()
+  if description:
+    form_fields.append(("description", description))
+  # Send a hash of all the base files so the server can determine if a copy
+  # already exists in an earlier patchset.
+  base_hashes = ""
+  for file, info in files.iteritems():
+    if not info[0] is None:
+      checksum = md5.new(info[0]).hexdigest()
+      if base_hashes:
+        base_hashes += "|"
+      base_hashes += checksum + ":" + file
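+  # The result is a "|"-separated list of "<md5 checksum>:<filename>" entries.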
+  form_fields.append(("base_hashes", base_hashes))
+  # If we're uploading base files, don't send the email before the uploads, so
+  # that it contains the file status.
+  if options.send_mail and options.download_base:
+    form_fields.append(("send_mail", "1"))
+  if not options.download_base:
+    form_fields.append(("content_upload", "1"))
+  if len(data) > MAX_UPLOAD_SIZE:
+    print "Patch is large, so uploading file patches separately."
+    uploaded_diff_file = []
+    form_fields.append(("separate_patches", "1"))
+  else:
+    uploaded_diff_file = [("data", "data.diff", data)]
+  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
+  response_body = rpc_server.Send("/upload", body, content_type=ctype)
+  patchset = None
+  if not options.download_base or not uploaded_diff_file:
+    lines = response_body.splitlines()
+    if len(lines) >= 2:
+      msg = lines[0]
+      patchset = lines[1].strip()
+      patches = [x.split(" ", 1) for x in lines[2:]]
+    else:
+      msg = response_body
+  else:
+    msg = response_body
+  StatusUpdate(msg)
+  if (not response_body.startswith("Issue created.") and
+      not response_body.startswith("Issue updated.")):
+    sys.exit(0)
+  issue = msg[msg.rfind("/")+1:]
+
+  if not uploaded_diff_file:
+    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
+    if not options.download_base:
+      patches = result
+
+  if not options.download_base:
+    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
+    if options.send_mail:
+      rpc_server.Send("/" + issue + "/mail", payload="")
+  return issue, patchset
+
+
+def main():
+  try:
+    RealMain(sys.argv)
+  except KeyboardInterrupt:
+    print
+    StatusUpdate("Interrupted.")
+    sys.exit(1)
+
+
+if __name__ == "__main__":
+  main()
diff --git a/googlemock/scripts/upload_gmock.py b/googlemock/scripts/upload_gmock.py
new file mode 100755
index 0000000..5dc484b
--- /dev/null
+++ b/googlemock/scripts/upload_gmock.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""upload_gmock.py v0.1.0 -- uploads a Google Mock patch for review.
+
+This simple wrapper passes all command line flags and
+--cc=googlemock@googlegroups.com to upload.py.
+
+USAGE: upload_gmock.py [options for upload.py]
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import sys
+
+CC_FLAG = '--cc='
+GMOCK_GROUP = 'googlemock@googlegroups.com'
+
+
+def main():
+  # Finds the path to upload.py, assuming it is in the same directory
+  # as this file.
+  my_dir = os.path.dirname(os.path.abspath(__file__))
+  upload_py_path = os.path.join(my_dir, 'upload.py')
+
+  # Adds Google Mock discussion group to the cc line if it's not there
+  # already.
+  upload_py_argv = [upload_py_path]
+  found_cc_flag = False
+  for arg in sys.argv[1:]:
+    if arg.startswith(CC_FLAG):
+      found_cc_flag = True
+      cc_line = arg[len(CC_FLAG):]
+      cc_list = [addr for addr in cc_line.split(',') if addr]
+      if GMOCK_GROUP not in cc_list:
+        cc_list.append(GMOCK_GROUP)
+      upload_py_argv.append(CC_FLAG + ','.join(cc_list))
+    else:
+      upload_py_argv.append(arg)
+
+  if not found_cc_flag:
+    upload_py_argv.append(CC_FLAG + GMOCK_GROUP)
+
+  # Invokes upload.py with the modified command line flags.
+  os.execv(upload_py_path, upload_py_argv)
+
+
+if __name__ == '__main__':
+  main()