Upgrade protobuf to v3.6.1
Merge commit 'be9845eb2abf508c86da76321b4e2c352b45b532' into master
Change-Id: I1b3cd2e177686afe95f44abc4fb0c6f9e001e784
diff --git a/third_party/protobuf/protobuf.bzl b/third_party/protobuf/protobuf.bzl
index 842785c..9589750 100644
--- a/third_party/protobuf/protobuf.bzl
+++ b/third_party/protobuf/protobuf.bzl
@@ -1,29 +1,57 @@
-# -*- mode: python; -*- PYTHON-PREPROCESSING-REQUIRED
-
def _GetPath(ctx, path):
- if str(ctx.label).startswith('@') and not str(ctx.label).startswith('@//'):
- fail('External labels not supported for now')
- return path
+ if ctx.label.workspace_root:
+ return ctx.label.workspace_root + '/' + path
+ else:
+ return path
+
+def _IsNewExternal(ctx):
+ # Bazel 0.4.4 and older have genfiles paths that look like:
+ # bazel-out/local-fastbuild/genfiles/external/repo/foo
+ # After the exec root rearrangement, they look like:
+ # ../repo/bazel-out/local-fastbuild/genfiles/foo
+ return ctx.label.workspace_root.startswith("../")
def _GenDir(ctx):
+ if _IsNewExternal(ctx):
+ # We are using the fact that Bazel 0.4.4+ provides repository-relative paths
+ # for ctx.genfiles_dir.
+ return ctx.genfiles_dir.path + (
+ "/" + ctx.attr.includes[0] if ctx.attr.includes and ctx.attr.includes[0] else "")
+ # This means that we're either in the old version OR the new version in the local repo.
+ # Either way, appending the source path to the genfiles dir works.
+ return ctx.var["GENDIR"] + "/" + _SourceDir(ctx)
+
+def _SourceDir(ctx):
if not ctx.attr.includes:
- if str(ctx.label).startswith('@') and not str(ctx.label).startswith('@//'):
- fail('External labels not supported for now')
- return ''
+ return ctx.label.workspace_root
if not ctx.attr.includes[0]:
return _GetPath(ctx, ctx.label.package)
if not ctx.label.package:
return _GetPath(ctx, ctx.attr.includes[0])
return _GetPath(ctx, ctx.label.package + '/' + ctx.attr.includes[0])
-def _CcOuts(srcs):
- return [s[:-len(".proto")] + ".pb.h" for s in srcs] + \
- [s[:-len(".proto")] + ".pb.cc" for s in srcs]
+def _CcHdrs(srcs, use_grpc_plugin=False):
+ ret = [s[:-len(".proto")] + ".pb.h" for s in srcs]
+ if use_grpc_plugin:
+ ret += [s[:-len(".proto")] + ".grpc.pb.h" for s in srcs]
+ return ret
-def _PyOuts(srcs):
- return [s[:-len(".proto")] + "_pb2.py" for s in srcs]
+def _CcSrcs(srcs, use_grpc_plugin=False):
+ ret = [s[:-len(".proto")] + ".pb.cc" for s in srcs]
+ if use_grpc_plugin:
+ ret += [s[:-len(".proto")] + ".grpc.pb.cc" for s in srcs]
+ return ret
-def _RelativeOutputPath(path, include):
+def _CcOuts(srcs, use_grpc_plugin=False):
+ return _CcHdrs(srcs, use_grpc_plugin) + _CcSrcs(srcs, use_grpc_plugin)
+
+def _PyOuts(srcs, use_grpc_plugin=False):
+ ret = [s[:-len(".proto")] + "_pb2.py" for s in srcs]
+ if use_grpc_plugin:
+ ret += [s[:-len(".proto")] + "_pb2_grpc.py" for s in srcs]
+ return ret
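+
+# Illustrative sketch (not part of the change itself): given srcs = ["foo.proto"],
+# the helpers above would yield roughly:
+#   _CcOuts(["foo.proto"])                       -> ["foo.pb.h", "foo.pb.cc"]
+#   _CcOuts(["foo.proto"], use_grpc_plugin=True) -> ["foo.pb.h", "foo.grpc.pb.h",
+#                                                    "foo.pb.cc", "foo.grpc.pb.cc"]
+#   _PyOuts(["foo.proto"], use_grpc_plugin=True) -> ["foo_pb2.py", "foo_pb2_grpc.py"]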
+
+def _RelativeOutputPath(path, include, dest=""):
if include == None:
return path
@@ -32,29 +60,21 @@
if include and include[-1] != '/':
include = include + '/'
+ if dest and dest[-1] != '/':
+ dest = dest + '/'
path = path[len(include):]
-
- package_name = PACKAGE_NAME
- if not package_name.startswith('third_party/protobuf'):
- fail('The package %s is not a protobuf package' % package_name)
- package_name = package_name[len('third_party/protobuf/'):]
- if not path.startswith(package_name):
- fail("The package %s is not within the path %s" % (package_name, path))
-
- if not package_name:
- return path
-
- return path[len(package_name)+1:]
+ return dest + path
def _proto_gen_impl(ctx):
"""General implementation for generating protos"""
srcs = ctx.files.srcs
deps = []
deps += ctx.files.srcs
+ source_dir = _SourceDir(ctx)
gen_dir = _GenDir(ctx)
- if gen_dir:
- import_flags = ["-I" + gen_dir]
+ if source_dir:
+ import_flags = ["-I" + source_dir, "-I" + gen_dir]
else:
import_flags = ["-I."]
@@ -64,16 +84,34 @@
args = []
if ctx.attr.gen_cc:
- args += ["--cpp_out=" + ctx.var["GENDIR"] + "/" + gen_dir]
+ args += ["--cpp_out=" + gen_dir]
if ctx.attr.gen_py:
- args += ["--python_out=" + ctx.var["GENDIR"] + "/" + gen_dir]
+ args += ["--python_out=" + gen_dir]
+
+ inputs = srcs + deps
+ if ctx.executable.plugin:
+ plugin = ctx.executable.plugin
+ lang = ctx.attr.plugin_language
+ if not lang and plugin.basename.startswith('protoc-gen-'):
+ lang = plugin.basename[len('protoc-gen-'):]
+ if not lang:
+ fail("cannot infer the target language of plugin", "plugin_language")
+
+ outdir = gen_dir
+ if ctx.attr.plugin_options:
+ outdir = ",".join(ctx.attr.plugin_options) + ":" + outdir
+ args += ["--plugin=protoc-gen-%s=%s" % (lang, plugin.path)]
+ args += ["--%s_out=%s" % (lang, outdir)]
+ inputs += [plugin]
if args:
ctx.action(
- inputs=srcs + deps,
+ inputs=inputs,
outputs=ctx.outputs.outs,
arguments=args + import_flags + [s.path for s in srcs],
executable=ctx.executable.protoc,
+ mnemonic="ProtoCompile",
+ use_default_shell_env=True,
)
return struct(
@@ -84,7 +122,7 @@
),
)
-_proto_gen = rule(
+proto_gen = rule(
attrs = {
"srcs": attr.label_list(allow_files = True),
"deps": attr.label_list(providers = ["proto"]),
@@ -95,6 +133,13 @@
single_file = True,
mandatory = True,
),
+ "plugin": attr.label(
+ cfg = "host",
+ allow_files = True,
+ executable = True,
+ ),
+ "plugin_language": attr.string(),
+ "plugin_options": attr.string_list(),
"gen_cc": attr.bool(),
"gen_py": attr.bool(),
"outs": attr.output_list(),
@@ -102,6 +147,57 @@
output_to_genfiles = True,
implementation = _proto_gen_impl,
)
+"""Generates codes from Protocol Buffers definitions.
+
+This rule helps you implement Skylark macros specific to the target
+language. Prefer the more specific `cc_proto_library`, `py_proto_library`,
+and other wrappers unless you are writing such a macro yourself.
+
+Args:
+ srcs: Protocol Buffers definition files (.proto) to run the protocol compiler
+ against.
+ deps: a list of dependency labels; must be other proto libraries.
+ includes: a list of include paths to .proto files.
+ protoc: the label of the protocol compiler to generate the sources.
+ plugin: the label of the protocol compiler plugin to be passed to the protocol
+ compiler.
+ plugin_language: the language of the generated sources
+ plugin_options: a list of options to be passed to the plugin
+ gen_cc: generates C++ sources in addition to the ones from the plugin.
+ gen_py: generates Python sources in addition to the ones from the plugin.
+ outs: a list of labels of the expected outputs from the protocol compiler.
+"""
+
+MSVC_COPTS = [
+ "/DHAVE_PTHREAD",
+ "/wd4018", # -Wno-sign-compare
+ "/wd4514", # -Wno-unused-function
+]
+
+load("@//tools/build_rules:select.bzl", "compiler_select")
+
+COPTS = [
+ "-DHAVE_PTHREAD",
+ "-DGOOGLE_THIRD_PARTY_PROTOBUF",
+ "-Wall",
+ "-Wwrite-strings",
+ "-Woverloaded-virtual",
+ "-Wno-sign-compare",
+ "-Wno-unused-function",
+ "-Wno-unused-parameter",
+ "-Wno-format-nonliteral",
+ "-Wno-switch-enum",
+ "-Wno-missing-field-initializers",
+ "-Wno-ignored-qualifiers",
+ ] + compiler_select({
+ "gcc": [
+ "-Wno-error=cast-align",
+ ],
+ "clang": [
+ "-Wno-unused-const-variable",
+ "-Wno-unused-private-field",
+ ],
+ })
def cc_proto_library(
name,
@@ -109,9 +205,12 @@
deps=[],
cc_libs=[],
include=None,
- protoc="//google/protobuf:protoc",
+ protoc="@com_google_protobuf//:protoc",
internal_bootstrap_hack=False,
- default_runtime="//google/protobuf:protobuf",
+ use_grpc_plugin=False,
+ default_runtime="@com_google_protobuf//:protobuf",
+ compatible_with = None,
+ copts = [],
**kargs):
"""Bazel rule to create a C++ protobuf library from proto source files
@@ -131,6 +230,8 @@
for bootstraping. When it is set to True, no files will be generated.
The rule will simply be a provider for .proto files, so that other
cc_proto_library can depend on it.
+ use_grpc_plugin: a flag to indicate whether to call the grpc C++ plugin
+ when processing the proto files.
default_runtime: the implicitly default runtime which will be depended on by
the generated cc_library target.
**kargs: other keyword arguments that are passed to cc_library.
@@ -144,7 +245,7 @@
if internal_bootstrap_hack:
# For pre-checked-in generated files, we add the internal_bootstrap_hack
# which will skip the codegen action.
- _proto_gen(
+ proto_gen(
name=name + "_genproto",
srcs=srcs,
deps=[s + "_genproto" for s in deps],
@@ -158,56 +259,95 @@
**kargs)
return
- outs = _CcOuts(srcs)
- _proto_gen(
+ grpc_cpp_plugin = None
+ if use_grpc_plugin:
+ grpc_cpp_plugin = "//external:grpc_cpp_plugin"
+
+ gen_srcs = _CcSrcs(srcs, use_grpc_plugin)
+ gen_hdrs = _CcHdrs(srcs, use_grpc_plugin)
+ outs = gen_srcs + gen_hdrs
+
+ proto_gen(
name=name + "_genproto",
srcs=srcs,
deps=[s + "_genproto" for s in deps],
includes=includes,
protoc=protoc,
+ plugin=grpc_cpp_plugin,
+ plugin_language="grpc",
gen_cc=1,
outs=outs,
+ compatible_with = compatible_with,
visibility=["//visibility:public"],
)
if default_runtime and not default_runtime in cc_libs:
cc_libs = cc_libs + [default_runtime]
+ if use_grpc_plugin:
+ cc_libs = cc_libs + ["//external:grpc_lib"]
native.cc_library(
name=name,
- srcs=outs,
+ srcs=gen_srcs,
+ hdrs=gen_hdrs,
deps=cc_libs + deps,
includes=includes,
+ copts = COPTS + copts,
+ compatible_with = compatible_with,
**kargs)
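+
+# Hypothetical BUILD usage with the grpc plugin enabled (the target and proto
+# names are illustrative assumptions):
+#
+#   cc_proto_library(
+#       name = "echo_cc_proto",
+#       srcs = ["echo.proto"],
+#       use_grpc_plugin = True,
+#   )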
-def internal_copied_filegroup(
- name,
- srcs,
- include,
- **kargs):
- """Bazel rule to fix sources file to workaround with python path issues.
+def internal_gen_well_known_protos_java(srcs):
+ """Bazel rule to generate the gen_well_known_protos_java genrule
Args:
- name: the name of the internal_copied_filegroup rule, which will be the
- name of the generated filegroup.
- srcs: the source files to be copied.
- include: the expected import root of the source.
- **kargs: extra arguments that will be passed into the filegroup.
+ srcs: the well known protos
"""
- outs = [_RelativeOutputPath(s, include) for s in srcs]
+ root = Label("%s//protobuf_java" % (REPOSITORY_NAME)).workspace_root
+ pkg = PACKAGE_NAME + "/" if PACKAGE_NAME else ""
+ if root == "":
+ include = " -I%ssrc " % pkg
+ else:
+ include = " -I%s/%ssrc " % (root, pkg)
+ native.genrule(
+ name = "gen_well_known_protos_java",
+ srcs = srcs,
+ outs = [
+ "wellknown.srcjar",
+ ],
+ cmd = "$(location :protoc) --java_out=$(@D)/wellknown.jar" +
+ " %s $(SRCS) " % include +
+ " && mv $(@D)/wellknown.jar $(@D)/wellknown.srcjar",
+ tools = [":protoc"],
+ )
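+
+# Hypothetical invocation (illustrative only; the srcs label is an assumption):
+#   internal_gen_well_known_protos_java(srcs = [":well_known_protos"])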
+
+def internal_copied_filegroup(name, srcs, strip_prefix, dest, **kwargs):
+ """Macro to copy files to a different directory and then create a filegroup.
+
+ This is used by the //:protobuf_python py_proto_library target to work around
+ an issue caused by Python source files that are part of the same Python
+ package being in separate directories.
+
+ Args:
+ srcs: The source files to copy and add to the filegroup.
+ strip_prefix: Path to the root of the files to copy.
+ dest: The directory to copy the source files into.
+ **kwargs: extra arguments that will be passed to the filegroup.
+ """
+ outs = [_RelativeOutputPath(s, strip_prefix, dest) for s in srcs]
native.genrule(
- name=name+"_genrule",
- srcs=srcs,
- outs=outs,
- cmd=" && ".join(["cp $(location %s) $(location %s)" %
- (s, _RelativeOutputPath(s, include))
- for s in srcs]))
+ name = name + "_genrule",
+ srcs = srcs,
+ outs = outs,
+ cmd = " && ".join(
+ ["cp $(location %s) $(location %s)" %
+ (s, _RelativeOutputPath(s, strip_prefix, dest)) for s in srcs]),
+ )
native.filegroup(
- name=name,
- srcs=outs,
- **kargs)
+ name = name,
+ srcs = outs,
+ **kwargs)
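+
+# Sketch of a possible invocation (names are hypothetical): copying
+# python/google/protobuf/message.py with strip_prefix = "python/" places it at
+# google/protobuf/message.py before it is grouped into the filegroup:
+#
+#   internal_copied_filegroup(
+#       name = "copied_python_srcs",
+#       srcs = ["python/google/protobuf/message.py"],
+#       strip_prefix = "python/",
+#       dest = "",
+#   )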
def py_proto_library(
name,
@@ -217,8 +357,9 @@
py_extra_srcs=[],
py_imports=[],
include=None,
- default_runtime="//google/protobuf:protobuf_python",
- protoc="//google/protobuf:protoc",
+ default_runtime="@com_google_protobuf//:protobuf_python",
+ protoc="@com_google_protobuf//:protoc",
+ use_grpc_plugin=False,
**kargs):
"""Bazel rule to create a Python protobuf library from proto source files
@@ -238,16 +379,25 @@
default_runtime: the implicitly default runtime which will be depended on by
the generated py_library target.
protoc: the label of the protocol compiler to generate the sources.
+ use_grpc_plugin: a flag to indicate whether to call the grpc Python plugin
+ when processing the proto files.
**kargs: other keyword arguments that are passed to cc_library.
"""
- outs = _PyOuts(srcs)
+ outs = _PyOuts(srcs, use_grpc_plugin)
includes = []
if include != None:
includes = [include]
- _proto_gen(
+ grpc_python_plugin = None
+ if use_grpc_plugin:
+ grpc_python_plugin = "//external:grpc_python_plugin"
+ # Note: Generated grpc code depends on Python grpc module. This dependency
+ # is not explicitly listed in py_libs. Instead, host system is assumed to
+ # have grpc installed.
+
+ proto_gen(
name=name + "_genproto",
srcs=srcs,
deps=[s + "_genproto" for s in deps],
@@ -256,26 +406,18 @@
gen_py=1,
outs=outs,
visibility=["//visibility:public"],
+ plugin=grpc_python_plugin,
+ plugin_language="grpc"
)
- if include != None:
- # Copy the output files to the desired location to make the import work.
- internal_copied_filegroup_name=name + "_internal_copied_filegroup"
- internal_copied_filegroup(
- name=internal_copied_filegroup_name,
- srcs=outs,
- include=include)
- outs=[internal_copied_filegroup_name]
-
if default_runtime and not default_runtime in py_libs + deps:
py_libs = py_libs + [default_runtime]
- py_libs = py_libs
native.py_library(
name=name,
srcs=outs+py_extra_srcs,
deps=py_libs+deps,
- imports=py_imports,
+ imports=includes,
**kargs)
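+
+# Hypothetical BUILD usage (target and file names are assumptions); with
+# use_grpc_plugin, the generated echo_pb2_grpc.py additionally requires the
+# grpc Python package on the host system:
+#
+#   py_proto_library(
+#       name = "echo_py_proto",
+#       srcs = ["echo.proto"],
+#       use_grpc_plugin = True,
+#   )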
def internal_protobuf_py_tests(
@@ -292,10 +434,21 @@
"""
for m in modules:
- s = _RelativeOutputPath(
- "python/google/protobuf/internal/%s.py" % m, "python")
+ s = "python/google/protobuf/internal/%s.py" % m
native.py_test(
name="py_%s" % m,
srcs=[s],
main=s,
**kargs)
+
+
+def check_protobuf_required_bazel_version():
+ """For WORKSPACE files, to check the installed version of bazel.
+
+ This ensures bazel supports our approach to proto_library() depending on a
+ copied filegroup. (Fixed in bazel 0.5.4)
+ """
+ expected = apple_common.dotted_version("0.5.4")
+ current = apple_common.dotted_version(native.bazel_version)
+ if current.compare_to(expected) < 0:
+ fail("Bazel must be newer than 0.5.4")