structure saas with tools

This commit is contained in:
Davidson Gomes
2025-04-25 15:30:54 -03:00
commit 1aef473937
16434 changed files with 6584257 additions and 0 deletions


@@ -0,0 +1,72 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .enums import Enum
from .fields import Field
from .fields import MapField
from .fields import RepeatedField
from .marshal import Marshal
from .message import Message
from .modules import define_module as module
from .primitives import ProtoType
from .version import __version__
DOUBLE = ProtoType.DOUBLE
FLOAT = ProtoType.FLOAT
INT64 = ProtoType.INT64
UINT64 = ProtoType.UINT64
INT32 = ProtoType.INT32
FIXED64 = ProtoType.FIXED64
FIXED32 = ProtoType.FIXED32
BOOL = ProtoType.BOOL
STRING = ProtoType.STRING
MESSAGE = ProtoType.MESSAGE
BYTES = ProtoType.BYTES
UINT32 = ProtoType.UINT32
ENUM = ProtoType.ENUM
SFIXED32 = ProtoType.SFIXED32
SFIXED64 = ProtoType.SFIXED64
SINT32 = ProtoType.SINT32
SINT64 = ProtoType.SINT64
__all__ = (
"__version__",
"Enum",
"Field",
"MapField",
"RepeatedField",
"Marshal",
"Message",
"module",
# Expose the types directly.
"DOUBLE",
"FLOAT",
"INT64",
"UINT64",
"INT32",
"FIXED64",
"FIXED32",
"BOOL",
"STRING",
"MESSAGE",
"BYTES",
"UINT32",
"ENUM",
"SFIXED32",
"SFIXED64",
"SINT32",
"SINT64",
)
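
These re-exports form the public proto-plus surface. As a rough sketch of how they fit together (the Song message and its fields below are illustrative, not part of this commit):

import proto

# A hypothetical message built from the names re-exported above.
class Song(proto.Message):
    title = proto.Field(proto.STRING, number=1)
    year = proto.Field(proto.INT32, number=2)

song = Song(title="Example", year=2018)
print(song.title)   # Example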


@@ -0,0 +1,196 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import inspect
import logging
from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
from google.protobuf import message
from google.protobuf import reflection
from proto.marshal.rules.message import MessageRule
log = logging.getLogger("_FileInfo")
class _FileInfo(
collections.namedtuple(
"_FileInfo",
["descriptor", "messages", "enums", "name", "nested", "nested_enum"],
)
):
registry = {} # Mapping[str, '_FileInfo']
@classmethod
def maybe_add_descriptor(cls, filename, package):
descriptor = cls.registry.get(filename)
if not descriptor:
descriptor = cls.registry[filename] = cls(
descriptor=descriptor_pb2.FileDescriptorProto(
name=filename,
package=package,
syntax="proto3",
),
enums=collections.OrderedDict(),
messages=collections.OrderedDict(),
name=filename,
nested={},
nested_enum={},
)
return descriptor
@staticmethod
def proto_file_name(name):
return "{0}.proto".format(name.replace(".", "/"))
def _get_manifest(self, new_class):
module = inspect.getmodule(new_class)
if hasattr(module, "__protobuf__"):
return frozenset(module.__protobuf__.manifest)
return frozenset()
def _get_remaining_manifest(self, new_class):
return self._get_manifest(new_class) - {new_class.__name__}
def _calculate_salt(self, new_class, fallback):
manifest = self._get_manifest(new_class)
if manifest and new_class.__name__ not in manifest:
log.warning(
"proto-plus module {module} has a declared manifest but {class_name} is not in it".format(
module=inspect.getmodule(new_class).__name__,
class_name=new_class.__name__,
)
)
return "" if new_class.__name__ in manifest else (fallback or "").lower()
def generate_file_pb(self, new_class, fallback_salt=""):
"""Generate the descriptors for all protos in the file.
This method takes the file descriptor attached to the parent
message and generates the immutable descriptors for all of the
messages in the file descriptor. (This must be done in one fell
swoop for immutability and to resolve proto cross-referencing.)
This is run automatically when the last proto in the file is
generated, as determined by the module's __all__ tuple.
"""
pool = descriptor_pool.Default()
# Salt the filename in the descriptor.
# This allows re-use of the filename by other proto messages if
# needed (e.g. if __all__ is not used).
salt = self._calculate_salt(new_class, fallback_salt)
self.descriptor.name = "{name}.proto".format(
name="_".join([self.descriptor.name[:-6], salt]).rstrip("_"),
)
# Add the file descriptor.
pool.Add(self.descriptor)
# Adding the file descriptor to the pool created a descriptor for
# each message; go back through our wrapper messages and associate
# them with the internal protobuf version.
for full_name, proto_plus_message in self.messages.items():
# Get the descriptor from the pool, and create the protobuf
# message based on it.
descriptor = pool.FindMessageTypeByName(full_name)
pb_message = reflection.GeneratedProtocolMessageType(
descriptor.name,
(message.Message,),
{"DESCRIPTOR": descriptor, "__module__": None},
)
# Register the message with the marshal so it is wrapped
# appropriately.
#
# We do this here (rather than at class creation) because it
# is not until this point that we have an actual protobuf
# message subclass, which is what we need to use.
proto_plus_message._meta._pb = pb_message
proto_plus_message._meta.marshal.register(
pb_message, MessageRule(pb_message, proto_plus_message)
)
# Iterate over any fields on the message and, if their type
# is a message still referenced as a string, resolve the reference.
for field in proto_plus_message._meta.fields.values():
if field.message and isinstance(field.message, str):
field.message = self.messages[field.message]
elif field.enum and isinstance(field.enum, str):
field.enum = self.enums[field.enum]
# Same thing for enums
for full_name, proto_plus_enum in self.enums.items():
descriptor = pool.FindEnumTypeByName(full_name)
proto_plus_enum._meta.pb = descriptor
# We no longer need to track this file's info; remove it from
# the module's registry and from this object.
self.registry.pop(self.name)
def ready(self, new_class):
"""Return True if a file descriptor may added, False otherwise.
This determine if all the messages that we plan to create have been
created, as best as we are able.
Since messages depend on one another, we create descriptor protos
(which reference each other using strings) and wait until we have
built everything that is going to be in the module, and then
use the descriptor protos to instantiate the actual descriptors in
one fell swoop.
Args:
new_class (~.MessageMeta): The new class currently undergoing
creation.
"""
# If there are any nested descriptors that have not been assigned to
# the descriptors that should contain them, then we are not ready.
if len(self.nested) or len(self.nested_enum):
return False
# If there are any unresolved fields (fields with a composite message
# declared as a string), ensure that the corresponding message is
# declared.
for field in self.unresolved_fields:
if (field.message and field.message not in self.messages) or (
field.enum and field.enum not in self.enums
):
return False
# If the module in which this class is defined provides a
# __protobuf__ property, it may have a manifest.
#
# Do not generate the file descriptor until every member of the
# manifest has been populated.
module = inspect.getmodule(new_class)
manifest = self._get_remaining_manifest(new_class)
# We are ready if all members have been populated.
return all(hasattr(module, i) for i in manifest)
@property
def unresolved_fields(self):
"""Return fields with referencing message types as strings."""
for proto_plus_message in self.messages.values():
for field in proto_plus_message._meta.fields.values():
if (field.message and isinstance(field.message, str)) or (
field.enum and isinstance(field.enum, str)
):
yield field
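
For context, the manifest that _get_manifest() reads comes from a module-level __protobuf__ declaration; a minimal sketch (the package and message names are illustrative assumptions):

import proto

# Declaring a manifest up front lets _FileInfo.ready() defer descriptor
# generation until every listed class exists in the module.
__protobuf__ = proto.module(
    package="example.v1",
    manifest={"Author", "Book"},
)

class Author(proto.Message):
    name = proto.Field(proto.STRING, number=1)

class Book(proto.Message):
    author = proto.Field(Author, number=1)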


@@ -0,0 +1,50 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from proto.marshal import Marshal
def compile(name, attrs):
"""Return the package and marshal to use.
Args:
name (str): The name of the new class, as sent to ``type.__new__``.
attrs (Mapping[str, Any]): The attrs for a new class, as sent
to ``type.__new__``
Returns:
Tuple[str, ~.Marshal]:
- The proto package, if any (empty string otherwise).
- The marshal object to use.
"""
# Pull a reference to the module where this class is being
# declared.
module = sys.modules.get(attrs.get("__module__"))
module_name = module.__name__ if hasattr(module, "__name__") else ""
proto_module = getattr(module, "__protobuf__", object())
# A package should be present; get the marshal from there.
# TODO: Revert to empty string as a package value after protobuf fix.
# When package is empty, upb based protobuf fails with an
# "TypeError: Couldn't build proto file into descriptor pool: invalid name: empty part ()' means"
# during an attempt to add to descriptor pool.
package = getattr(
proto_module, "package", module_name if module_name else "_default_package"
)
marshal = Marshal(name=getattr(proto_module, "marshal", package))
# Done; return the data.
return (package, marshal)
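
A rough sketch of what compile() resolves for a class declared in a module that defines __protobuf__ (the module and package names below are assumptions for illustration):

# Suppose a module "example.types" declares:
#
#     import proto
#     __protobuf__ = proto.module(package="example.v1")
#
# For a class being created in that module, compile() would resolve roughly:
#
#     package, marshal = compile("Book", {"__module__": "example.types"})
#     # package -> "example.v1" (from __protobuf__; otherwise the module
#     #            name, or "_default_package" if neither is available)
#     # marshal -> Marshal(name="example.v1"), shared by every class that
#     #            registers under the same marshal name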


@@ -0,0 +1,225 @@
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for :mod:`datetime`."""
import calendar
import datetime
import re
from google.protobuf import timestamp_pb2
_UTC_EPOCH = datetime.datetime.fromtimestamp(0, datetime.timezone.utc)
_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
# datetime.strptime cannot handle nanosecond precision: parse w/ regex
_RFC3339_NANOS = re.compile(
r"""
(?P<no_fraction>
\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS
)
( # Optional decimal part
\. # decimal point
(?P<nanos>\d{1,9}) # nanoseconds, maybe truncated
)?
Z # Zulu
""",
re.VERBOSE,
)
def _from_microseconds(value):
"""Convert timestamp in microseconds since the unix epoch to datetime.
Args:
value (float): The timestamp to convert, in microseconds.
Returns:
datetime.datetime: The datetime object equivalent to the timestamp in
UTC.
"""
return _UTC_EPOCH + datetime.timedelta(microseconds=value)
def _to_rfc3339(value, ignore_zone=True):
"""Convert a datetime to an RFC3339 timestamp string.
Args:
value (datetime.datetime):
The datetime object to be converted to a string.
ignore_zone (bool): If True, then the timezone (if any) of the
datetime object is ignored and the datetime is treated as UTC.
Returns:
str: The RFC3339 formatted string representing the datetime.
"""
if not ignore_zone and value.tzinfo is not None:
# Convert to UTC and remove the time zone info.
value = value.replace(tzinfo=None) - value.utcoffset()
return value.strftime(_RFC3339_MICROS)
class DatetimeWithNanoseconds(datetime.datetime):
"""Track nanosecond in addition to normal datetime attrs.
Nanosecond can be passed only as a keyword argument.
"""
__slots__ = ("_nanosecond",)
# pylint: disable=arguments-differ
def __new__(cls, *args, **kw):
nanos = kw.pop("nanosecond", 0)
if nanos > 0:
if "microsecond" in kw:
raise TypeError("Specify only one of 'microsecond' or 'nanosecond'")
kw["microsecond"] = nanos // 1000
inst = datetime.datetime.__new__(cls, *args, **kw)
inst._nanosecond = nanos or 0
return inst
# pylint: disable=arguments-differ
def replace(self, *args, **kw):
"""Return a date with the same value, except for those parameters given
new values by whichever keyword arguments are specified. For example,
if d == date(2002, 12, 31), then
d.replace(day=26) == date(2002, 12, 26).
NOTE: nanosecond and microsecond are mutually exclusive arguments.
"""
ms_provided = "microsecond" in kw
ns_provided = "nanosecond" in kw
provided_ns = kw.pop("nanosecond", 0)
prev_nanos = self.nanosecond
if ms_provided and ns_provided:
raise TypeError("Specify only one of 'microsecond' or 'nanosecond'")
if ns_provided:
# if nanos were provided, manipulate microsecond kw arg to super
kw["microsecond"] = provided_ns // 1000
inst = super().replace(*args, **kw)
if ms_provided:
# ms were provided, nanos are invalid, build from ms
inst._nanosecond = inst.microsecond * 1000
elif ns_provided:
# ns were provided, replace nanoseconds to match after calling super
inst._nanosecond = provided_ns
else:
# if neither ms or ns were provided, passthru previous nanos.
inst._nanosecond = prev_nanos
return inst
@property
def nanosecond(self):
"""Read-only: nanosecond precision."""
return self._nanosecond or self.microsecond * 1000
def rfc3339(self):
"""Return an RFC3339-compliant timestamp.
Returns:
(str): Timestamp string according to RFC3339 spec.
"""
if self._nanosecond == 0:
return _to_rfc3339(self)
nanos = str(self._nanosecond).rjust(9, "0").rstrip("0")
return "{}.{}Z".format(self.strftime(_RFC3339_NO_FRACTION), nanos)
@classmethod
def from_rfc3339(cls, stamp):
"""Parse RFC3339-compliant timestamp, preserving nanoseconds.
Args:
stamp (str): RFC3339 stamp, with up to nanosecond precision
Returns:
:class:`DatetimeWithNanoseconds`:
an instance matching the timestamp string
Raises:
ValueError: if `stamp` does not match the expected format
"""
with_nanos = _RFC3339_NANOS.match(stamp)
if with_nanos is None:
raise ValueError(
"Timestamp: {}, does not match pattern: {}".format(
stamp, _RFC3339_NANOS.pattern
)
)
bare = datetime.datetime.strptime(
with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
)
fraction = with_nanos.group("nanos")
if fraction is None:
nanos = 0
else:
scale = 9 - len(fraction)
nanos = int(fraction) * (10**scale)
return cls(
bare.year,
bare.month,
bare.day,
bare.hour,
bare.minute,
bare.second,
nanosecond=nanos,
tzinfo=datetime.timezone.utc,
)
def timestamp_pb(self):
"""Return a timestamp message.
Returns:
(:class:`~google.protobuf.timestamp_pb2.Timestamp`): Timestamp message
"""
inst = (
self
if self.tzinfo is not None
else self.replace(tzinfo=datetime.timezone.utc)
)
delta = inst - _UTC_EPOCH
seconds = int(delta.total_seconds())
nanos = self._nanosecond or self.microsecond * 1000
return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
@classmethod
def from_timestamp_pb(cls, stamp):
"""Parse RFC3339-compliant timestamp, preserving nanoseconds.
Args:
stamp (:class:`~google.protobuf.timestamp_pb2.Timestamp`): timestamp message
Returns:
:class:`DatetimeWithNanoseconds`:
an instance matching the timestamp message
"""
microseconds = int(stamp.seconds * 1e6)
bare = _from_microseconds(microseconds)
return cls(
bare.year,
bare.month,
bare.day,
bare.hour,
bare.minute,
bare.second,
nanosecond=stamp.nanos,
tzinfo=datetime.timezone.utc,
)
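
A brief usage sketch of the class above, showing nanosecond precision surviving RFC 3339 and Timestamp round trips (the values are illustrative):

import datetime
from proto.datetime_helpers import DatetimeWithNanoseconds

stamp = DatetimeWithNanoseconds(
    2018, 1, 1, 12, 30, 0, nanosecond=123456789, tzinfo=datetime.timezone.utc
)
print(stamp.rfc3339())            # 2018-01-01T12:30:00.123456789Z
again = DatetimeWithNanoseconds.from_rfc3339(stamp.rfc3339())
print(again.nanosecond)           # 123456789
pb = stamp.timestamp_pb()         # google.protobuf.Timestamp with nanos=123456789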


@@ -0,0 +1,165 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
from google.protobuf import descriptor_pb2
from proto import _file_info
from proto import _package_info
from proto.marshal.rules.enums import EnumRule
class ProtoEnumMeta(enum.EnumMeta):
"""A metaclass for building and registering protobuf enums."""
def __new__(mcls, name, bases, attrs):
# Do not do any special behavior for `proto.Enum` itself.
if bases[0] == enum.IntEnum:
return super().__new__(mcls, name, bases, attrs)
# Get the essential information about the proto package, and where
# this component belongs within the file.
package, marshal = _package_info.compile(name, attrs)
# Determine the local path of this proto component within the file.
local_path = tuple(attrs.get("__qualname__", name).split("."))
# Sanity check: We get the wrong full name if a class is declared
# inside a function local scope; correct this.
if "<locals>" in local_path:
ix = local_path.index("<locals>")
local_path = local_path[: ix - 1] + local_path[ix + 1 :]
# Determine the full name in protocol buffers.
full_name = ".".join((package,) + local_path).lstrip(".")
filename = _file_info._FileInfo.proto_file_name(
attrs.get("__module__", name.lower())
)
# Retrieve any enum options.
# We expect something that looks like an EnumOptions message,
# either an actual instance or a dict-like representation.
pb_options = "_pb_options"
opts = attrs.pop(pb_options, {})
# This is the only portable way to remove the _pb_options name
# from the enum attrs.
# In 3.7 onwards, we can define an _ignore_ attribute and do some
# mucking around with that.
if pb_options in attrs._member_names:
if isinstance(attrs._member_names, list):
idx = attrs._member_names.index(pb_options)
attrs._member_names.pop(idx)
elif isinstance(attrs._member_names, set): # PyPy
attrs._member_names.discard(pb_options)
else: # Python 3.11.0b3
del attrs._member_names[pb_options]
# Make the descriptor.
enum_desc = descriptor_pb2.EnumDescriptorProto(
name=name,
# Note: the superclass ctor removes the variants, so get them now.
# Note: proto3 requires that the first variant value be zero.
value=sorted(
(
descriptor_pb2.EnumValueDescriptorProto(name=name, number=number)
# Minor hack to get all the enum variants out.
# Use the `_member_names` property to get only the enum members
# See https://github.com/googleapis/proto-plus-python/issues/490
for name, number in attrs.items()
if name in attrs._member_names and isinstance(number, int)
),
key=lambda v: v.number,
),
options=opts,
)
file_info = _file_info._FileInfo.maybe_add_descriptor(filename, package)
if len(local_path) == 1:
file_info.descriptor.enum_type.add().MergeFrom(enum_desc)
else:
file_info.nested_enum[local_path] = enum_desc
# Run the superclass constructor.
cls = super().__new__(mcls, name, bases, attrs)
# We can't just add a "_meta" element to attrs because the Enum
# machinery doesn't know what to do with a non-int value.
# The pb is set later, in generate_file_pb
cls._meta = _EnumInfo(full_name=full_name, pb=None)
file_info.enums[full_name] = cls
# Register the enum with the marshal.
marshal.register(cls, EnumRule(cls))
# Generate the descriptor for the file if it is ready.
if file_info.ready(new_class=cls):
file_info.generate_file_pb(new_class=cls, fallback_salt=full_name)
# Done; return the class.
return cls
class Enum(enum.IntEnum, metaclass=ProtoEnumMeta):
"""A enum object that also builds a protobuf enum descriptor."""
def _comparable(self, other):
# Avoid 'isinstance' to prevent other IntEnums from matching
return type(other) in (type(self), int)
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
if not self._comparable(other):
return NotImplemented
return self.value == int(other)
def __ne__(self, other):
if not self._comparable(other):
return NotImplemented
return self.value != int(other)
def __lt__(self, other):
if not self._comparable(other):
return NotImplemented
return self.value < int(other)
def __le__(self, other):
if not self._comparable(other):
return NotImplemented
return self.value <= int(other)
def __ge__(self, other):
if not self._comparable(other):
return NotImplemented
return self.value >= int(other)
def __gt__(self, other):
if not self._comparable(other):
return NotImplemented
return self.value > int(other)
class _EnumInfo:
def __init__(self, *, full_name: str, pb):
self.full_name = full_name
self.pb = pb
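
A minimal example of the metaclass in action (the Genre enum is illustrative, not part of this commit):

import proto

class Genre(proto.Enum):
    GENRE_UNSPECIFIED = 0   # proto3 requires the first variant to be zero
    CLASSICAL = 1
    ROCK = 2

print(Genre.ROCK == 2)                # True: members compare equal to plain ints
print(Genre(1) is Genre.CLASSICAL)    # True: ints coerce back to members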


@@ -0,0 +1,165 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import EnumMeta
from google.protobuf import descriptor_pb2
from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
from proto.primitives import ProtoType
class Field:
"""A representation of a type of field in protocol buffers."""
# Fields are NOT repeated nor maps.
# The RepeatedField subclass overrides these values.
repeated = False
def __init__(
self,
proto_type,
*,
number: int,
message=None,
enum=None,
oneof: str = None,
json_name: str = None,
optional: bool = False
):
# This class is not intended to stand entirely alone;
# data is augmented by the metaclass for Message.
self.mcls_data = None
self.parent = None
# If the proto type sent is an object or a string, it is really
# a message or enum.
if not isinstance(proto_type, int):
# Note: We only support the "shortcut syntax" for enums
# when receiving the actual class.
if isinstance(proto_type, (EnumMeta, EnumTypeWrapper)):
enum = proto_type
proto_type = ProtoType.ENUM
else:
message = proto_type
proto_type = ProtoType.MESSAGE
# Save the direct arguments.
self.number = number
self.proto_type = proto_type
self.message = message
self.enum = enum
self.json_name = json_name
self.optional = optional
self.oneof = oneof
# Once the descriptor is accessed the first time, cache it.
# This is important because in rare cases the message or enum
# types are written later.
self._descriptor = None
@property
def descriptor(self):
"""Return the descriptor for the field."""
if not self._descriptor:
# Resolve the message type, if any, to a string.
type_name = None
if isinstance(self.message, str):
if not self.message.startswith(self.package):
self.message = "{package}.{name}".format(
package=self.package,
name=self.message,
)
type_name = self.message
elif self.message:
type_name = (
self.message.DESCRIPTOR.full_name
if hasattr(self.message, "DESCRIPTOR")
else self.message._meta.full_name
)
elif isinstance(self.enum, str):
if not self.enum.startswith(self.package):
self.enum = "{package}.{name}".format(
package=self.package,
name=self.enum,
)
type_name = self.enum
elif self.enum:
type_name = (
self.enum.DESCRIPTOR.full_name
if hasattr(self.enum, "DESCRIPTOR")
else self.enum._meta.full_name
)
# Set the descriptor.
self._descriptor = descriptor_pb2.FieldDescriptorProto(
name=self.name,
number=self.number,
label=3 if self.repeated else 1,
type=self.proto_type,
type_name=type_name,
json_name=self.json_name,
proto3_optional=self.optional,
)
# Return the descriptor.
return self._descriptor
@property
def name(self) -> str:
"""Return the name of the field."""
return self.mcls_data["name"]
@property
def package(self) -> str:
"""Return the package of the field."""
return self.mcls_data["package"]
@property
def pb_type(self):
"""Return the composite type of the field, or the primitive type if a primitive."""
# For enums, return the Python enum.
if self.enum:
return self.enum
# For primitive fields, we still want to know
# what the type is.
if not self.message:
return self.proto_type
# Return the internal protobuf message.
if hasattr(self.message, "_meta"):
return self.message.pb()
return self.message
class RepeatedField(Field):
"""A representation of a repeated field in protocol buffers."""
repeated = True
class MapField(Field):
"""A representation of a map field in protocol buffers."""
def __init__(self, key_type, value_type, *, number: int, message=None, enum=None):
super().__init__(value_type, number=number, message=message, enum=enum)
self.map_key_type = key_type
__all__ = (
"Field",
"MapField",
"RepeatedField",
)
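
Field, RepeatedField, and MapField are normally used as message attributes; a short illustrative sketch (the message names are assumptions):

import proto

class Author(proto.Message):
    name = proto.Field(proto.STRING, number=1)

class Book(proto.Message):
    title = proto.Field(proto.STRING, number=1)
    # A message type given as a string is resolved later by _FileInfo.
    author = proto.Field("Author", number=2)
    tags = proto.RepeatedField(proto.STRING, number=3)
    ratings = proto.MapField(proto.STRING, proto.INT32, number=4)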


@@ -0,0 +1,18 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .marshal import Marshal
__all__ = ("Marshal",)


@@ -0,0 +1,24 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .maps import MapComposite
from .repeated import Repeated
from .repeated import RepeatedComposite
__all__ = (
"MapComposite",
"Repeated",
"RepeatedComposite",
)


@@ -0,0 +1,82 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from proto.utils import cached_property
from google.protobuf.message import Message
class MapComposite(collections.abc.MutableMapping):
"""A view around a mutable sequence in protocol buffers.
This implements the full Python MutableMapping interface, but all methods
modify the underlying field container directly.
"""
@cached_property
def _pb_type(self):
"""Return the protocol buffer type for this sequence."""
# Huzzah, another hack. Still less bad than RepeatedComposite.
return type(self.pb.GetEntryClass()().value)
def __init__(self, sequence, *, marshal):
"""Initialize a wrapper around a protobuf map.
Args:
sequence: A protocol buffers map.
marshal (~.MarshalRegistry): An instantiated marshal, used to
convert values going to and from this map.
"""
self._pb = sequence
self._marshal = marshal
def __contains__(self, key):
# Protocol buffers is so permissive that querying for the existence
# of a key will in and of itself create it.
#
# By taking a tuple of the keys and querying that, we avoid sending
# the lookup to protocol buffers and therefore avoid creating the key.
return key in tuple(self.keys())
def __getitem__(self, key):
# We handle raising KeyError ourselves, because otherwise protocol
# buffers will create the key if it does not exist.
if key not in self:
raise KeyError(key)
return self._marshal.to_python(self._pb_type, self.pb[key])
def __setitem__(self, key, value):
pb_value = self._marshal.to_proto(self._pb_type, value, strict=True)
# Directly setting a key is not allowed; however, protocol buffers
# is so permissive that querying for the existence of a key will in
# and of itself create it.
#
# Therefore, we create a key that way (clearing any fields that may
# be set) and then merge in our values.
self.pb[key].Clear()
self.pb[key].MergeFrom(pb_value)
def __delitem__(self, key):
self.pb.pop(key)
def __len__(self):
return len(self.pb)
def __iter__(self):
return iter(self.pb)
@property
def pb(self):
return self._pb
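
In practice this view is what a message-valued map field returns on a proto-plus message; a hedged sketch (Review and Book are illustrative):

import proto

class Review(proto.Message):
    stars = proto.Field(proto.INT32, number=1)

class Book(proto.Message):
    # A message-valued map field, which is what yields a MapComposite view.
    reviews = proto.MapField(proto.STRING, Review, number=1)

book = Book()
book.reviews["alice"] = Review(stars=5)   # Clear() + MergeFrom() under the hood
print("bob" in book.reviews)              # False; the check does not create the key
print(book.reviews["alice"].stars)        # 5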


@@ -0,0 +1,189 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import copy
from typing import Iterable
from proto.utils import cached_property
class Repeated(collections.abc.MutableSequence):
"""A view around a mutable sequence in protocol buffers.
This implements the full Python MutableSequence interface, but all methods
modify the underlying field container directly.
"""
def __init__(self, sequence, *, marshal, proto_type=None):
"""Initialize a wrapper around a protobuf repeated field.
Args:
sequence: A protocol buffers repeated field.
marshal (~.MarshalRegistry): An instantiated marshal, used to
convert values going to and from this sequence.
"""
self._pb = sequence
self._marshal = marshal
self._proto_type = proto_type
def __copy__(self):
"""Copy this object and return the copy."""
return type(self)(self.pb[:], marshal=self._marshal)
def __delitem__(self, key):
"""Delete the given item."""
del self.pb[key]
def __eq__(self, other):
if hasattr(other, "pb"):
return tuple(self.pb) == tuple(other.pb)
return tuple(self.pb) == tuple(other) if isinstance(other, Iterable) else False
def __getitem__(self, key):
"""Return the given item."""
return self.pb[key]
def __len__(self):
"""Return the length of the sequence."""
return len(self.pb)
def __ne__(self, other):
return not self == other
def __repr__(self):
return repr([*self])
def __setitem__(self, key, value):
self.pb[key] = value
def insert(self, index: int, value):
"""Insert ``value`` in the sequence before ``index``."""
self.pb.insert(index, value)
def sort(self, *, key: str = None, reverse: bool = False):
"""Stable sort *IN PLACE*."""
self.pb.sort(key=key, reverse=reverse)
@property
def pb(self):
return self._pb
class RepeatedComposite(Repeated):
"""A view around a mutable sequence of messages in protocol buffers.
This implements the full Python MutableSequence interface, but all methods
modify the underlying field container directly.
"""
@cached_property
def _pb_type(self):
"""Return the protocol buffer type for this sequence."""
# Provide the marshal-given proto_type, if any.
# Used for RepeatedComposite of Enum.
if self._proto_type is not None:
return self._proto_type
# There is no public-interface mechanism to determine the type
# of what should go in the list (and the C implementation seems to
# have no exposed mechanism at all).
#
# If the list has members, use the existing list members to
# determine the type.
if len(self.pb) > 0:
return type(self.pb[0])
# We have no members in the list, so we get the type from the attributes.
if hasattr(self.pb, "_message_descriptor") and hasattr(
self.pb._message_descriptor, "_concrete_class"
):
return self.pb._message_descriptor._concrete_class
# Fallback logic in case attributes are not available
# In order to get the type, we create a throw-away copy and add a
# blank member to it.
canary = copy.deepcopy(self.pb).add()
return type(canary)
def __eq__(self, other):
if super().__eq__(other):
return True
return (
tuple([i for i in self]) == tuple(other)
if isinstance(other, Iterable)
else False
)
def __getitem__(self, key):
return self._marshal.to_python(self._pb_type, self.pb[key])
def __setitem__(self, key, value):
# The underlying protocol buffer does not define __setitem__, so we
# have to implement all the operations on our own.
# If ``key`` is an integer, as in list[index] = value:
if isinstance(key, int):
if -len(self) <= key < len(self):
self.pop(key) # Delete the old item.
self.insert(key, value) # Insert the new item in its place.
else:
raise IndexError("list assignment index out of range")
# If ``key`` is a slice object, as in list[start:stop:step] = [values]:
elif isinstance(key, slice):
start, stop, step = key.indices(len(self))
if not isinstance(value, collections.abc.Iterable):
raise TypeError("can only assign an iterable")
if step == 1: # Is not an extended slice.
# Assign all the new values to the sliced part, replacing the
# old values, if any, and unconditionally inserting those
# values whose indices already exceed the slice length.
for index, item in enumerate(value):
if start + index < stop:
self.pop(start + index)
self.insert(start + index, item)
# If there are fewer values than the length of the slice, remove
# the remaining elements so that the slice adapts to the
# newly provided values.
for _ in range(stop - start - len(value)):
self.pop(start + len(value))
else: # Is an extended slice.
indices = range(start, stop, step)
if len(value) != len(indices): # XXX: Use PEP 572 on 3.8+
raise ValueError(
f"attempt to assign sequence of size "
f"{len(value)} to extended slice of size "
f"{len(indices)}"
)
# Assign each value to its index, calling this function again
# with individual integer indexes that get processed above.
for index, item in zip(indices, value):
self[index] = item
else:
raise TypeError(
f"list indices must be integers or slices, not {type(key).__name__}"
)
def insert(self, index: int, value):
"""Insert ``value`` in the sequence before ``index``."""
pb_value = self._marshal.to_proto(self._pb_type, value)
self.pb.insert(index, pb_value)
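
Both views behave like ordinary Python lists over the underlying repeated field; a short sketch (the Book message is illustrative):

import proto

class Book(proto.Message):
    tags = proto.RepeatedField(proto.STRING, number=1)

book = Book(tags=["fiction"])
book.tags.append("classic")        # MutableSequence methods write through to the pb field
book.tags[0] = "non-fiction"       # handled by Repeated.__setitem__
print(list(book.tags))             # ['non-fiction', 'classic']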


@@ -0,0 +1,64 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file pulls in the container types from internal protocol buffers,
# and exports the types available.
#
# If the C extensions were not installed, then their container types will
# not be included.
from google.protobuf.internal import containers
# Import all message types to ensure that pyext types are recognized
# when upb types exist. Conda's protobuf defaults to pyext despite upb existing.
# See https://github.com/googleapis/proto-plus-python/issues/470
try:
from google._upb import _message as _message_upb
except ImportError:
_message_upb = None
try:
from google.protobuf.pyext import _message as _message_pyext
except ImportError:
_message_pyext = None
repeated_composite_types = (containers.RepeatedCompositeFieldContainer,)
repeated_scalar_types = (containers.RepeatedScalarFieldContainer,)
map_composite_types = (containers.MessageMap,)
# In `proto/marshal.py`, for compatibility with protobuf 5.x,
# we'll use `map_composite_type_names` to check whether
# the name of the class of a protobuf type is
# `MessageMapContainer`, and, if `True`, return a MapComposite.
# See https://github.com/protocolbuffers/protobuf/issues/16596
map_composite_type_names = ("MessageMapContainer",)
for message in [_message_upb, _message_pyext]:
if message:
repeated_composite_types += (message.RepeatedCompositeContainer,)
repeated_scalar_types += (message.RepeatedScalarContainer,)
try:
map_composite_types += (message.MessageMapContainer,)
except AttributeError:
# The `MessageMapContainer` attribute is not available in Protobuf 5.x+
pass
__all__ = (
"repeated_composite_types",
"repeated_scalar_types",
"map_composite_types",
"map_composite_type_names",
)


@@ -0,0 +1,297 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import enum
from google.protobuf import message
from google.protobuf import duration_pb2
from google.protobuf import timestamp_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import struct_pb2
from google.protobuf import wrappers_pb2
from proto.marshal import compat
from proto.marshal.collections import MapComposite
from proto.marshal.collections import Repeated
from proto.marshal.collections import RepeatedComposite
from proto.marshal.rules import bytes as pb_bytes
from proto.marshal.rules import stringy_numbers
from proto.marshal.rules import dates
from proto.marshal.rules import struct
from proto.marshal.rules import wrappers
from proto.marshal.rules import field_mask
from proto.primitives import ProtoType
class Rule(abc.ABC):
"""Abstract class definition for marshal rules."""
@classmethod
def __subclasshook__(cls, C):
if hasattr(C, "to_python") and hasattr(C, "to_proto"):
return True
return NotImplemented
class BaseMarshal:
"""The base class to translate between protobuf and Python classes.
Protocol buffers defines many common types (e.g. Timestamp, Duration)
which also exist in the Python standard library. The marshal essentially
translates between these: it keeps a registry of common protocol buffers
and their Python representations, and translates back and forth.
The protocol buffer class is always the "key" in this relationship; when
presenting a message, the declared field types are used to determine
whether a value should be transformed into another class. Similarly,
when accepting a Python value (when setting a field, for example),
the declared field type is still used. This means that, if appropriate,
multiple protocol buffer types may use the same Python type.
The primary implementation of this is :class:`Marshal`, which should
usually be used instead of this class directly.
"""
def __init__(self):
self._rules = {}
self._noop = NoopRule()
self.reset()
def register(self, proto_type: type, rule: Rule = None):
"""Register a rule against the given ``proto_type``.
This function expects a ``proto_type`` (the descriptor class) and
a ``rule``; an object with a ``to_python`` and ``to_proto`` method.
Each method should return the appropriate Python or protocol buffer
type, and be idempotent (e.g. accept either type as input).
This function can also be used as a decorator::
@marshal.register(timestamp_pb2.Timestamp)
class TimestampRule:
...
In this case, the class will be initialized for you with zero
arguments.
Args:
proto_type (type): A protocol buffer message type.
rule: A marshal object
"""
# If a rule was provided, register it and be done.
if rule:
# Ensure the rule implements Rule.
if not isinstance(rule, Rule):
raise TypeError(
"Marshal rule instances must implement "
"`to_proto` and `to_python` methods."
)
# Register the rule.
self._rules[proto_type] = rule
return
# Create an inner function that will register an instance of the
# marshal class to this object's registry, and return it.
def register_rule_class(rule_class: type):
# Ensure the rule class is a valid rule.
if not issubclass(rule_class, Rule):
raise TypeError(
"Marshal rule subclasses must implement "
"`to_proto` and `to_python` methods."
)
# Register the rule class.
self._rules[proto_type] = rule_class()
return rule_class
return register_rule_class
def reset(self):
"""Reset the registry to its initial state."""
self._rules.clear()
# Register date and time wrappers.
self.register(timestamp_pb2.Timestamp, dates.TimestampRule())
self.register(duration_pb2.Duration, dates.DurationRule())
# Register FieldMask wrappers.
self.register(field_mask_pb2.FieldMask, field_mask.FieldMaskRule())
# Register nullable primitive wrappers.
self.register(wrappers_pb2.BoolValue, wrappers.BoolValueRule())
self.register(wrappers_pb2.BytesValue, wrappers.BytesValueRule())
self.register(wrappers_pb2.DoubleValue, wrappers.DoubleValueRule())
self.register(wrappers_pb2.FloatValue, wrappers.FloatValueRule())
self.register(wrappers_pb2.Int32Value, wrappers.Int32ValueRule())
self.register(wrappers_pb2.Int64Value, wrappers.Int64ValueRule())
self.register(wrappers_pb2.StringValue, wrappers.StringValueRule())
self.register(wrappers_pb2.UInt32Value, wrappers.UInt32ValueRule())
self.register(wrappers_pb2.UInt64Value, wrappers.UInt64ValueRule())
# Register the google.protobuf.Struct wrappers.
#
# These are aware of the marshal that created them, because they
# create RepeatedComposite and MapComposite instances directly and
# need to pass the marshal to them.
self.register(struct_pb2.Value, struct.ValueRule(marshal=self))
self.register(struct_pb2.ListValue, struct.ListValueRule(marshal=self))
self.register(struct_pb2.Struct, struct.StructRule(marshal=self))
# Special case for bytes to allow base64 encode/decode
self.register(ProtoType.BYTES, pb_bytes.BytesRule())
# Special case for int64 from strings because of dict round trip.
# See https://github.com/protocolbuffers/protobuf/issues/2679
for rule_class in stringy_numbers.STRINGY_NUMBER_RULES:
self.register(rule_class._proto_type, rule_class())
def get_rule(self, proto_type):
# Rules are needed to convert values between proto-plus and pb.
# Retrieve the rule for the specified proto type.
# The NoopRule will be used when a rule is not found.
rule = self._rules.get(proto_type, self._noop)
# If we don't find a rule, also check under `_instances`
# in case there is a rule in another package.
# See https://github.com/googleapis/proto-plus-python/issues/349
if rule == self._noop and hasattr(self, "_instances"):
for _, instance in self._instances.items():
rule = instance._rules.get(proto_type, self._noop)
if rule != self._noop:
break
return rule
def to_python(self, proto_type, value, *, absent: bool = None):
# Internal protobuf has its own special type for lists of values.
# Return a view around it that implements MutableSequence.
value_type = type(value) # Minor performance boost over isinstance
if value_type in compat.repeated_composite_types:
return RepeatedComposite(value, marshal=self)
if value_type in compat.repeated_scalar_types:
if isinstance(proto_type, type):
return RepeatedComposite(value, marshal=self, proto_type=proto_type)
else:
return Repeated(value, marshal=self)
# Same thing for maps of messages.
# See https://github.com/protocolbuffers/protobuf/issues/16596
# We need to look up the name of the type in compat.map_composite_type_names
# as class `MessageMapContainer` is no longer exposed
# This is done to avoid taking a breaking change in proto-plus.
if (
value_type in compat.map_composite_types
or value_type.__name__ in compat.map_composite_type_names
):
return MapComposite(value, marshal=self)
return self.get_rule(proto_type=proto_type).to_python(value, absent=absent)
def to_proto(self, proto_type, value, *, strict: bool = False):
# The protos in google/protobuf/struct.proto are exceptional cases,
# because they can and should represent themselves as lists and dicts.
# These cases are handled in their rule classes.
if proto_type not in (
struct_pb2.Value,
struct_pb2.ListValue,
struct_pb2.Struct,
):
# For our repeated and map view objects, simply return the
# underlying pb.
if isinstance(value, (Repeated, MapComposite)):
return value.pb
# Convert lists and tuples recursively.
if isinstance(value, (list, tuple)):
return type(value)(self.to_proto(proto_type, i) for i in value)
# Convert dictionaries recursively when the proto type is a map.
# This is slightly more complicated than converting a list or tuple
# because we have to step through the magic that protocol buffers does.
#
# Essentially, a type of map<string, Foo> will show up here as
# a FoosEntry with a `key` field, `value` field, and a `map_entry`
# annotation. We need to do the conversion based on the `value`
# field's type.
if isinstance(value, dict) and (
proto_type.DESCRIPTOR.has_options
and proto_type.DESCRIPTOR.GetOptions().map_entry
):
recursive_type = type(proto_type().value)
return {k: self.to_proto(recursive_type, v) for k, v in value.items()}
pb_value = self.get_rule(proto_type=proto_type).to_proto(value)
# Sanity check: If we are in strict mode, did we get the value we want?
if strict and not isinstance(pb_value, proto_type):
raise TypeError(
"Parameter must be instance of the same class; "
"expected {expected}, got {got}".format(
expected=proto_type.__name__,
got=pb_value.__class__.__name__,
),
)
# Return the final value.
return pb_value
class Marshal(BaseMarshal):
"""The translator between protocol buffer and Python instances.
The bulk of the implementation is in :class:`BaseMarshal`. This class
adds identity tracking: multiple instantiations of :class:`Marshal` with
the same name will provide the same instance.
"""
_instances = {}
def __new__(cls, *, name: str):
"""Create a marshal instance.
Args:
name (str): The name of the marshal. Instantiating multiple
marshals with the same ``name`` argument will provide the
same marshal each time.
"""
klass = cls._instances.get(name)
if klass is None:
klass = cls._instances[name] = super().__new__(cls)
return klass
def __init__(self, *, name: str):
"""Instantiate a marshal.
Args:
name (str): The name of the marshal. Instantiating multiple
marshals with the same ``name`` argument will provide the
same marshal each time.
"""
self._name = name
if not hasattr(self, "_rules"):
super().__init__()
class NoopRule:
"""A catch-all rule that does nothing."""
def to_python(self, pb_value, *, absent: bool = None):
return pb_value
def to_proto(self, value):
return value
__all__ = ("Marshal",)


@@ -0,0 +1,13 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,44 @@
# Copyright (C) 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
class BytesRule:
"""A marshal between Python strings and protobuf bytes.
Note: this conversion is asymmetric because Python does have a bytes type.
It is sometimes necessary to convert proto bytes fields to strings, e.g. for
JSON encoding, marshalling a message to a dict. Because bytes fields can
represent arbitrary data, bytes fields are base64 encoded when they need to
be represented as strings.
It is necessary to have the conversion be bidirectional, i.e.
my_message == MyMessage(MyMessage.to_dict(my_message))
To accomplish this, we need to intercept assignments from strings and
base64 decode them back into bytes.
"""
def to_python(self, value, *, absent: bool = None):
return value
def to_proto(self, value):
if isinstance(value, str):
value = value.encode("utf-8")
value += b"=" * (4 - len(value) % 4) # padding
value = base64.urlsafe_b64decode(value)
return value
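
A tiny demonstration of the rule's one-way decoding (the sample string is illustrative):

from proto.marshal.rules.bytes import BytesRule

rule = BytesRule()
print(rule.to_proto("aGVsbG8"))   # b'hello' -- base64 text is padded and decoded
print(rule.to_proto(b"hello"))    # b'hello' -- raw bytes pass through unchanged
print(rule.to_python(b"hello"))   # b'hello' -- no conversion on the way out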


@@ -0,0 +1,85 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from datetime import timedelta
from datetime import timezone
from google.protobuf import duration_pb2
from google.protobuf import timestamp_pb2
from proto import datetime_helpers, utils
class TimestampRule:
"""A marshal between Python datetimes and protobuf timestamps.
Note: Python datetimes are less precise than protobuf datetimes
(microsecond vs. nanosecond level precision). If nanosecond-level
precision matters, it is recommended to interact with the internal
proto directly.
"""
def to_python(
self, value, *, absent: bool = None
) -> datetime_helpers.DatetimeWithNanoseconds:
if isinstance(value, timestamp_pb2.Timestamp):
if absent:
return None
return datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb(value)
return value
def to_proto(self, value) -> timestamp_pb2.Timestamp:
if isinstance(value, datetime_helpers.DatetimeWithNanoseconds):
return value.timestamp_pb()
if isinstance(value, datetime):
return timestamp_pb2.Timestamp(
seconds=int(value.timestamp()),
nanos=value.microsecond * 1000,
)
if isinstance(value, str):
timestamp_value = timestamp_pb2.Timestamp()
timestamp_value.FromJsonString(value=value)
return timestamp_value
return value
class DurationRule:
"""A marshal between Python timedeltas and protobuf durations.
Note: Python timedeltas are less precise than protobuf durations
(microsecond vs. nanosecond level precision). If nanosecond-level
precision matters, it is recommended to interact with the internal
proto directly.
"""
def to_python(self, value, *, absent: bool = None) -> timedelta:
if isinstance(value, duration_pb2.Duration):
return timedelta(
days=value.seconds // 86400,
seconds=value.seconds % 86400,
microseconds=value.nanos // 1000,
)
return value
def to_proto(self, value) -> duration_pb2.Duration:
if isinstance(value, timedelta):
return duration_pb2.Duration(
seconds=value.days * 86400 + value.seconds,
nanos=value.microseconds * 1000,
)
if isinstance(value, str):
duration_value = duration_pb2.Duration()
duration_value.FromJsonString(value=value)
return duration_value
return value
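
A direct sketch of the two rules above (the sample values are illustrative):

import datetime
from google.protobuf import duration_pb2
from proto.marshal.rules.dates import DurationRule, TimestampRule

ts = TimestampRule().to_proto(
    datetime.datetime(2018, 1, 1, tzinfo=datetime.timezone.utc)
)
print(ts.seconds)   # 1514764800

delta = DurationRule().to_python(duration_pb2.Duration(seconds=90061, nanos=500000))
print(delta)        # 1 day, 1:01:01.000500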


@@ -0,0 +1,59 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Type
import enum
import warnings
class EnumRule:
"""A marshal for converting between integer values and enum values."""
def __init__(self, enum_class: Type[enum.IntEnum]):
self._enum = enum_class
def to_python(self, value, *, absent: bool = None):
if isinstance(value, int) and not isinstance(value, self._enum):
try:
# Coerce the int on the wire to the enum value.
return self._enum(value)
except ValueError:
# Since it is possible to add values to enums, we do
# not want to flatly error on this.
#
# However, it is useful to make some noise about it so
# the user realizes that an unexpected value came along.
warnings.warn(
"Unrecognized {name} enum value: {value}".format(
name=self._enum.__name__,
value=value,
)
)
return value
def to_proto(self, value):
# Accept enum values and coerce to the pure integer.
# This is not strictly necessary (protocol buffers can take these
# objects as they subclass int) but nevertheless seems like the
# right thing to do.
if isinstance(value, self._enum):
return value.value
# If a string is provided that matches an enum value, coerce it
# to the enum value.
if isinstance(value, str):
return self._enum[value].value
# We got a pure integer; pass it on.
return value
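
A quick sketch of the coercions above (the Color enum is illustrative):

import proto
from proto.marshal.rules.enums import EnumRule

class Color(proto.Enum):
    COLOR_UNSPECIFIED = 0
    RED = 1

rule = EnumRule(Color)
print(rule.to_python(1) is Color.RED)   # True: wire ints become enum members
print(rule.to_proto("RED"))             # 1: names are looked up on the enum
print(rule.to_python(99))               # 99, plus a warning about the unknown value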


@@ -0,0 +1,36 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.protobuf import field_mask_pb2
class FieldMaskRule:
"""A marshal between FieldMask and strings.
See https://github.com/googleapis/proto-plus-python/issues/333
and
https://developers.google.com/protocol-buffers/docs/proto3#json
for more details.
"""
def to_python(self, value, *, absent: bool = None):
return value
def to_proto(self, value):
if isinstance(value, str):
field_mask_value = field_mask_pb2.FieldMask()
field_mask_value.FromJsonString(value=value)
return field_mask_value
return value
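
A one-line sketch of the string form this rule accepts (the path names are illustrative):

from proto.marshal.rules.field_mask import FieldMaskRule

mask = FieldMaskRule().to_proto("user.display_name,photo")
print(list(mask.paths))   # ['user.display_name', 'photo']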


@@ -0,0 +1,53 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class MessageRule:
"""A marshal for converting between a descriptor and proto.Message."""
def __init__(self, descriptor: type, wrapper: type):
self._descriptor = descriptor
self._wrapper = wrapper
def to_python(self, value, *, absent: bool = None):
if isinstance(value, self._descriptor):
return self._wrapper.wrap(value)
return value
def to_proto(self, value):
if isinstance(value, self._wrapper):
return self._wrapper.pb(value)
if isinstance(value, dict) and not self.is_map:
# We need to use the wrapper's marshaling to handle
# potentially problematic nested messages.
try:
# Try the fast path first.
return self._descriptor(**value)
except (TypeError, ValueError, AttributeError) as ex:
# If we have a TypeError, ValueError or AttributeError,
# try the slow path in case the error
# was:
# - an int64/string issue.
# - a missing key issue in case a key only exists with a `_` suffix.
# See related issue: https://github.com/googleapis/python-api-core/issues/227.
# - a missing key issue due to nested struct. See: https://github.com/googleapis/proto-plus-python/issues/424.
# - a missing key issue due to nested duration. See: https://github.com/googleapis/google-cloud-python/issues/13350.
return self._wrapper(value)._pb
return value
@property
def is_map(self):
"""Return True if the descriptor is a map entry, False otherwise."""
desc = self._descriptor.DESCRIPTOR
return desc.has_options and desc.GetOptions().map_entry
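
A minimal sketch of the dict-coercion path this rule enables (Author and Book are illustrative):

import proto

class Author(proto.Message):
    name = proto.Field(proto.STRING, number=1)

class Book(proto.Message):
    author = proto.Field(Author, number=1)

# MessageRule.to_proto lets a plain dict stand in for a nested message.
book = Book(author={"name": "Ada"})
print(book.author.name)   # Ada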


@@ -0,0 +1,71 @@
# Copyright (C) 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from proto.primitives import ProtoType
class StringyNumberRule:
"""A marshal between certain numeric types and strings
This is a necessary hack to allow round trip conversion
from messages to dicts back to messages.
See https://github.com/protocolbuffers/protobuf/issues/2679
and
https://developers.google.com/protocol-buffers/docs/proto3#json
for more details.
"""
def to_python(self, value, *, absent: bool = None):
return value
def to_proto(self, value):
if value is not None:
return self._python_type(value)
return None
class Int64Rule(StringyNumberRule):
_python_type = int
_proto_type = ProtoType.INT64
class UInt64Rule(StringyNumberRule):
_python_type = int
_proto_type = ProtoType.UINT64
class SInt64Rule(StringyNumberRule):
_python_type = int
_proto_type = ProtoType.SINT64
class Fixed64Rule(StringyNumberRule):
_python_type = int
_proto_type = ProtoType.FIXED64
class SFixed64Rule(StringyNumberRule):
_python_type = int
_proto_type = ProtoType.SFIXED64
STRINGY_NUMBER_RULES = [
Int64Rule,
UInt64Rule,
SInt64Rule,
Fixed64Rule,
SFixed64Rule,
]
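
The point of these rules is the JSON/dict round trip: 64-bit integers are rendered as strings by the proto3 JSON mapping, and the rules coerce those strings back to ints on the way in. An illustrative sketch (the `Counter` message is hypothetical):

import proto

class Counter(proto.Message):
    count = proto.Field(proto.INT64, number=1)

counter = Counter(count="9007199254740993")   # string accepted, coerced to int
assert counter.count == 9007199254740993

# to_dict renders the int64 as a string (proto3 JSON mapping), and the dict
# can be fed straight back into the constructor.
as_dict = Counter.to_dict(counter)
assert as_dict == {"count": "9007199254740993"}
assert Counter(as_dict) == counter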

View File

@@ -0,0 +1,143 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections.abc
from google.protobuf import struct_pb2
from proto.marshal.collections import maps
from proto.marshal.collections import repeated
class ValueRule:
"""A rule to marshal between google.protobuf.Value and Python values."""
def __init__(self, *, marshal):
self._marshal = marshal
def to_python(self, value, *, absent: bool = None):
"""Coerce the given value to the appropriate Python type.
Note that both NullValue and absent fields return None.
In order to disambiguate between these two options,
use a containment check, e.g.
"value" in foo
which is True for NullValue and False for an absent value.
"""
kind = value.WhichOneof("kind")
if kind == "null_value" or absent:
return None
if kind == "bool_value":
return bool(value.bool_value)
if kind == "number_value":
return float(value.number_value)
if kind == "string_value":
return str(value.string_value)
if kind == "struct_value":
return self._marshal.to_python(
struct_pb2.Struct,
value.struct_value,
absent=False,
)
if kind == "list_value":
return self._marshal.to_python(
struct_pb2.ListValue,
value.list_value,
absent=False,
)
# If more variants are ever added, we want to fail loudly
# instead of tacitly returning None.
raise ValueError("Unexpected kind: %s" % kind) # pragma: NO COVER
def to_proto(self, value) -> struct_pb2.Value:
"""Return a protobuf Value object representing this value."""
if isinstance(value, struct_pb2.Value):
return value
if value is None:
return struct_pb2.Value(null_value=0)
if isinstance(value, bool):
return struct_pb2.Value(bool_value=value)
if isinstance(value, (int, float)):
return struct_pb2.Value(number_value=float(value))
if isinstance(value, str):
return struct_pb2.Value(string_value=value)
if isinstance(value, collections.abc.Sequence):
return struct_pb2.Value(
list_value=self._marshal.to_proto(struct_pb2.ListValue, value),
)
if isinstance(value, collections.abc.Mapping):
return struct_pb2.Value(
struct_value=self._marshal.to_proto(struct_pb2.Struct, value),
)
raise ValueError("Unable to coerce value: %r" % value)
class ListValueRule:
"""A rule translating google.protobuf.ListValue and list-like objects."""
def __init__(self, *, marshal):
self._marshal = marshal
def to_python(self, value, *, absent: bool = None):
"""Coerce the given value to a Python sequence."""
return (
None
if absent
else repeated.RepeatedComposite(value.values, marshal=self._marshal)
)
def to_proto(self, value) -> struct_pb2.ListValue:
# We got a proto, or else something we sent originally.
# Preserve the instance we have.
if isinstance(value, struct_pb2.ListValue):
return value
if isinstance(value, repeated.RepeatedComposite):
return struct_pb2.ListValue(values=[v for v in value.pb])
# We got a list (or something list-like); convert it.
return struct_pb2.ListValue(
values=[self._marshal.to_proto(struct_pb2.Value, v) for v in value]
)
class StructRule:
"""A rule translating google.protobuf.Struct and dict-like objects."""
def __init__(self, *, marshal):
self._marshal = marshal
def to_python(self, value, *, absent: bool = None):
"""Coerce the given value to a Python mapping."""
return (
None if absent else maps.MapComposite(value.fields, marshal=self._marshal)
)
def to_proto(self, value) -> struct_pb2.Struct:
# We got a proto, or else something we sent originally.
# Preserve the instance we have.
if isinstance(value, struct_pb2.Struct):
return value
if isinstance(value, maps.MapComposite):
return struct_pb2.Struct(
fields={k: v for k, v in value.pb.items()},
)
# We got a dict (or something dict-like); convert it.
answer = struct_pb2.Struct(
fields={
k: self._marshal.to_proto(struct_pb2.Value, v) for k, v in value.items()
}
)
return answer
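
Together these rules let a `google.protobuf.Struct` field be written with plain Python dicts and lists and read back as Python-style values (via the map/repeated composites rather than literal dicts and lists). A hypothetical sketch:

import proto
from google.protobuf import struct_pb2

class Event(proto.Message):
    payload = proto.Field(proto.MESSAGE, number=1, message=struct_pb2.Struct)

event = Event(payload={"name": "deploy", "ok": True, "retries": 2, "tags": ["a", "b"]})
assert event.payload["name"] == "deploy"
assert event.payload["ok"] is True
assert event.payload["retries"] == 2          # numbers round-trip through number_value (float)
assert list(event.payload["tags"]) == ["a", "b"]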

View File

@@ -0,0 +1,84 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.protobuf import wrappers_pb2
class WrapperRule:
"""A marshal for converting the protobuf wrapper classes to Python.
This class converts between ``google.protobuf.BoolValue``,
``google.protobuf.StringValue``, and their siblings to the appropriate
Python equivalents.
These are effectively similar to the protobuf primitives except
that None becomes a possible value.
"""
def to_python(self, value, *, absent: bool = None):
if isinstance(value, self._proto_type):
if absent:
return None
return value.value
return value
def to_proto(self, value):
if isinstance(value, self._python_type):
return self._proto_type(value=value)
return value
class DoubleValueRule(WrapperRule):
_proto_type = wrappers_pb2.DoubleValue
_python_type = float
class FloatValueRule(WrapperRule):
_proto_type = wrappers_pb2.FloatValue
_python_type = float
class Int64ValueRule(WrapperRule):
_proto_type = wrappers_pb2.Int64Value
_python_type = int
class UInt64ValueRule(WrapperRule):
_proto_type = wrappers_pb2.UInt64Value
_python_type = int
class Int32ValueRule(WrapperRule):
_proto_type = wrappers_pb2.Int32Value
_python_type = int
class UInt32ValueRule(WrapperRule):
_proto_type = wrappers_pb2.UInt32Value
_python_type = int
class BoolValueRule(WrapperRule):
_proto_type = wrappers_pb2.BoolValue
_python_type = bool
class StringValueRule(WrapperRule):
_proto_type = wrappers_pb2.StringValue
_python_type = str
class BytesValueRule(WrapperRule):
_proto_type = wrappers_pb2.BytesValue
_python_type = bytes
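
These wrappers are what make ``None`` representable for primitives; combined with the ``__contains__`` support on messages (later in this commit), an unset ``BoolValue`` is distinguishable from an explicit ``False``. A hypothetical sketch:

import proto
from google.protobuf import wrappers_pb2

class Flag(proto.Message):
    enabled = proto.Field(proto.MESSAGE, number=1, message=wrappers_pb2.BoolValue)

unset = Flag()
assert unset.enabled is None                  # absent wrapper reads back as None
assert "enabled" not in unset

off = Flag(enabled=False)
assert off.enabled is False                   # explicitly set wrapper reads back as the bool
assert "enabled" in off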

View File

@@ -0,0 +1,969 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import collections.abc
import copy
import re
from typing import Any, Dict, List, Optional, Type
import warnings
import google.protobuf
from google.protobuf import descriptor_pb2
from google.protobuf import message
from google.protobuf.json_format import MessageToDict, MessageToJson, Parse
from proto import _file_info
from proto import _package_info
from proto.fields import Field
from proto.fields import MapField
from proto.fields import RepeatedField
from proto.marshal import Marshal
from proto.primitives import ProtoType
from proto.utils import has_upb
PROTOBUF_VERSION = google.protobuf.__version__
_upb = has_upb() # Important to cache result here.
class MessageMeta(type):
"""A metaclass for building and registering Message subclasses."""
def __new__(mcls, name, bases, attrs):
# Do not do any special behavior for Message itself.
if not bases:
return super().__new__(mcls, name, bases, attrs)
# Get the essential information about the proto package, and where
# this component belongs within the file.
package, marshal = _package_info.compile(name, attrs)
# Determine the local path of this proto component within the file.
local_path = tuple(attrs.get("__qualname__", name).split("."))
# Sanity check: We get the wrong full name if a class is declared
# inside a function local scope; correct this.
if "<locals>" in local_path:
ix = local_path.index("<locals>")
local_path = local_path[: ix - 1] + local_path[ix + 1 :]
# Determine the full name in protocol buffers.
full_name = ".".join((package,) + local_path).lstrip(".")
# Special case: Maps. Map fields are special; they are essentially
# shorthand for a nested message and a repeated field of that message.
# Decompose each map into its constituent form.
# https://developers.google.com/protocol-buffers/docs/proto3#maps
map_fields = {}
for key, field in attrs.items():
if not isinstance(field, MapField):
continue
# Determine the name of the entry message.
msg_name = "{pascal_key}Entry".format(
pascal_key=re.sub(
r"_\w",
lambda m: m.group()[1:].upper(),
key,
).replace(key[0], key[0].upper(), 1),
)
# Create the "entry" message (with the key and value fields).
#
# Note: We instantiate an ordered dictionary here and then
# attach key and value in order to ensure that the fields are
# iterated in the correct order when the class is created.
# This is only an issue in Python 3.5, where the order is
# random (and the wrong order causes the pool to refuse to add
# the descriptor because reasons).
entry_attrs = collections.OrderedDict(
{
"__module__": attrs.get("__module__", None),
"__qualname__": "{prefix}.{name}".format(
prefix=attrs.get("__qualname__", name),
name=msg_name,
),
"_pb_options": {"map_entry": True},
}
)
entry_attrs["key"] = Field(field.map_key_type, number=1)
entry_attrs["value"] = Field(
field.proto_type,
number=2,
enum=field.enum,
message=field.message,
)
map_fields[msg_name] = MessageMeta(msg_name, (Message,), entry_attrs)
# Create the repeated field for the entry message.
map_fields[key] = RepeatedField(
ProtoType.MESSAGE,
number=field.number,
message=map_fields[msg_name],
)
# Add the new entries to the attrs
attrs.update(map_fields)
# Okay, now we deal with all the rest of the fields.
# Iterate over all the attributes and separate the fields into
# their own sequence.
fields = []
new_attrs = {}
oneofs = collections.OrderedDict()
proto_imports = set()
index = 0
for key, field in attrs.items():
# Sanity check: If this is not a field, do nothing.
if not isinstance(field, Field):
# The field objects themselves should not be direct attributes.
new_attrs[key] = field
continue
# Add data that the field requires that we do not take in the
# constructor because we can derive it from the metaclass.
# (The goal is to make the declaration syntax as nice as possible.)
field.mcls_data = {
"name": key,
"parent_name": full_name,
"index": index,
"package": package,
}
# Add the field to the list of fields.
fields.append(field)
# If this field is part of a "oneof", ensure the oneof itself
# is represented.
if field.oneof:
# Keep a running tally of the index of each oneof, and assign
# that index to the field's descriptor.
oneofs.setdefault(field.oneof, len(oneofs))
field.descriptor.oneof_index = oneofs[field.oneof]
# If this field references a message, it may be from another
# proto file; ensure we know about the import (to faithfully
# construct our file descriptor proto).
if field.message and not isinstance(field.message, str):
field_msg = field.message
if hasattr(field_msg, "pb") and callable(field_msg.pb):
field_msg = field_msg.pb()
# Sanity check: The field's message may not yet be defined if
# it was a Message defined in the same file, and the file
# descriptor proto has not yet been generated.
#
# We do nothing in this situation; everything will be handled
# correctly when the file descriptor is created later.
if field_msg:
proto_imports.add(field_msg.DESCRIPTOR.file.name)
# Same thing, but for enums.
elif field.enum and not isinstance(field.enum, str):
field_enum = (
field.enum._meta.pb
if hasattr(field.enum, "_meta")
else field.enum.DESCRIPTOR
)
if field_enum:
proto_imports.add(field_enum.file.name)
# Increment the field index counter.
index += 1
# As per descriptor.proto, all synthetic oneofs must be ordered after
# 'real' oneofs.
opt_attrs = {}
for field in fields:
if field.optional:
field.oneof = "_{}".format(field.name)
field.descriptor.oneof_index = oneofs[field.oneof] = len(oneofs)
opt_attrs[field.name] = field.name
# Generating a metaclass dynamically provides class attributes that
# instances can't see. This provides idiomatically named constants
# that enable the following pattern to check for field presence:
#
# class MyMessage(proto.Message):
# field = proto.Field(proto.INT32, number=1, optional=True)
#
# m = MyMessage()
# MyMessage.field in m
if opt_attrs:
mcls = type("AttrsMeta", (mcls,), opt_attrs)
# Determine the filename.
# We determine an appropriate proto filename based on the
# Python module.
filename = _file_info._FileInfo.proto_file_name(
new_attrs.get("__module__", name.lower())
)
# Get or create the information about the file, including the
# descriptor to which the new message descriptor shall be added.
file_info = _file_info._FileInfo.maybe_add_descriptor(filename, package)
# Ensure any imports that would be necessary are assigned to the file
# descriptor proto being created.
for proto_import in proto_imports:
if proto_import not in file_info.descriptor.dependency:
file_info.descriptor.dependency.append(proto_import)
# Retrieve any message options.
opts = descriptor_pb2.MessageOptions(**new_attrs.pop("_pb_options", {}))
# Create the underlying proto descriptor.
desc = descriptor_pb2.DescriptorProto(
name=name,
field=[i.descriptor for i in fields],
oneof_decl=[
descriptor_pb2.OneofDescriptorProto(name=i) for i in oneofs.keys()
],
options=opts,
)
# If any descriptors were nested under this one, they need to be
# attached as nested types here.
child_paths = [p for p in file_info.nested.keys() if local_path == p[:-1]]
for child_path in child_paths:
desc.nested_type.add().MergeFrom(file_info.nested.pop(child_path))
# Same thing, but for enums
child_paths = [p for p in file_info.nested_enum.keys() if local_path == p[:-1]]
for child_path in child_paths:
desc.enum_type.add().MergeFrom(file_info.nested_enum.pop(child_path))
# Add the descriptor to the file if it is a top-level descriptor,
# or to a "holding area" for nested messages otherwise.
if len(local_path) == 1:
file_info.descriptor.message_type.add().MergeFrom(desc)
else:
file_info.nested[local_path] = desc
# Create the MessageInfo instance to be attached to this message.
new_attrs["_meta"] = _MessageInfo(
fields=fields,
full_name=full_name,
marshal=marshal,
options=opts,
package=package,
)
# Run the superclass constructor.
cls = super().__new__(mcls, name, bases, new_attrs)
# The info class and fields need a reference to the class just created.
cls._meta.parent = cls
for field in cls._meta.fields.values():
field.parent = cls
# Add this message to the _FileInfo instance; this allows us to
# associate the descriptor with the message once the descriptor
# is generated.
file_info.messages[full_name] = cls
# Generate the descriptor for the file if it is ready.
if file_info.ready(new_class=cls):
file_info.generate_file_pb(new_class=cls, fallback_salt=full_name)
# Done; return the class.
return cls
@classmethod
def __prepare__(mcls, name, bases, **kwargs):
return collections.OrderedDict()
@property
def meta(cls):
return cls._meta
def __dir__(self):
try:
names = set(dir(type))
names.update(
(
"meta",
"pb",
"wrap",
"serialize",
"deserialize",
"to_json",
"from_json",
"to_dict",
"copy_from",
)
)
desc = self.pb().DESCRIPTOR
names.update(t.name for t in desc.nested_types)
names.update(e.name for e in desc.enum_types)
return names
except AttributeError:
return dir(type)
def pb(cls, obj=None, *, coerce: bool = False):
"""Return the underlying protobuf Message class or instance.
Args:
obj: If provided, and an instance of ``cls``, return the
underlying protobuf instance.
coerce (bool): If provided, will attempt to coerce ``obj`` to
``cls`` if it is not already an instance.
"""
if obj is None:
return cls.meta.pb
if not isinstance(obj, cls):
if coerce:
obj = cls(obj)
else:
raise TypeError(
"%r is not an instance of %s"
% (
obj,
cls.__name__,
)
)
return obj._pb
def wrap(cls, pb):
"""Return a Message object that shallowly wraps the descriptor.
Args:
pb: A protocol buffer object, such as would be returned by
:meth:`pb`.
"""
# Optimized fast path.
instance = cls.__new__(cls)
super(cls, instance).__setattr__("_pb", pb)
return instance
def serialize(cls, instance) -> bytes:
"""Return the serialized proto.
Args:
instance: An instance of this message type, or something
compatible (accepted by the type's constructor).
Returns:
bytes: The serialized representation of the protocol buffer.
"""
return cls.pb(instance, coerce=True).SerializeToString()
def deserialize(cls, payload: bytes) -> "Message":
"""Given a serialized proto, deserialize it into a Message instance.
Args:
payload (bytes): The serialized proto.
Returns:
~.Message: An instance of the message class against which this
method was called.
"""
return cls.wrap(cls.pb().FromString(payload))
def _warn_if_including_default_value_fields_is_used_protobuf_5(
cls, including_default_value_fields: Optional[bool]
) -> None:
"""
Warn Protobuf 5.x+ users that `including_default_value_fields` is deprecated if it is set.
Args:
including_default_value_fields (Optional(bool)): The value of `including_default_value_fields` set by the user.
"""
if (
PROTOBUF_VERSION[0] not in ("3", "4")
and including_default_value_fields is not None
):
warnings.warn(
"""The argument `including_default_value_fields` has been removed from
Protobuf 5.x. Please use `always_print_fields_with_no_presence` instead.
""",
DeprecationWarning,
)
def _raise_if_print_fields_values_are_set_and_differ(
cls,
always_print_fields_with_no_presence: Optional[bool],
including_default_value_fields: Optional[bool],
) -> None:
"""
Raise Exception if both `always_print_fields_with_no_presence` and `including_default_value_fields` are set
and the values differ.
Args:
always_print_fields_with_no_presence (Optional(bool)): The value of `always_print_fields_with_no_presence` set by the user.
including_default_value_fields (Optional(bool)): The value of `including_default_value_fields` set by the user.
Returns:
None
Raises:
ValueError: if both `always_print_fields_with_no_presence` and `including_default_value_fields` are set and
the values differ.
"""
if (
always_print_fields_with_no_presence is not None
and including_default_value_fields is not None
and always_print_fields_with_no_presence != including_default_value_fields
):
raise ValueError(
"Arguments `always_print_fields_with_no_presence` and `including_default_value_fields` must match"
)
def _normalize_print_fields_without_presence(
cls,
always_print_fields_with_no_presence: Optional[bool],
including_default_value_fields: Optional[bool],
) -> bool:
"""
Return True if fields with no presence should be included in the results.
By default, fields with no presence are included in the results
when neither `always_print_fields_with_no_presence` nor
`including_default_value_fields` is set.
Args:
always_print_fields_with_no_presence (Optional(bool)): The value of `always_print_fields_with_no_presence` set by the user.
including_default_value_fields (Optional(bool)): The value of `including_default_value_fields` set by the user.
Returns:
bool: Whether fields with no presence should be included in the results.
Raises:
ValueError: if both `always_print_fields_with_no_presence` and `including_default_value_fields` are set and
the values differ.
"""
cls._warn_if_including_default_value_fields_is_used_protobuf_5(
including_default_value_fields
)
cls._raise_if_print_fields_values_are_set_and_differ(
always_print_fields_with_no_presence, including_default_value_fields
)
# Default to True if neither `always_print_fields_with_no_presence` nor `including_default_value_fields` is set
return (
(
always_print_fields_with_no_presence is None
and including_default_value_fields is None
)
or always_print_fields_with_no_presence
or including_default_value_fields
)
def to_json(
cls,
instance,
*,
use_integers_for_enums=True,
including_default_value_fields=None,
preserving_proto_field_name=False,
sort_keys=False,
indent=2,
float_precision=None,
always_print_fields_with_no_presence=None,
) -> str:
"""Given a message instance, serialize it to json
Args:
instance: An instance of this message type, or something
compatible (accepted by the type's constructor).
use_integers_for_enums (Optional(bool)): An option that determines whether enum
values should be represented by strings (False) or integers (True).
Default is True.
including_default_value_fields (Optional(bool)): Deprecated. Use argument
`always_print_fields_with_no_presence` instead. An option that
determines whether the default field values should be included in the results.
This value must match `always_print_fields_with_no_presence`,
if both arguments are explicitly set.
preserving_proto_field_name (Optional(bool)): An option that
determines whether field name representations preserve
proto case (snake_case) or use lowerCamelCase. Default is False.
sort_keys (Optional(bool)): If True, then the output will be sorted by field names.
Default is False.
indent (Optional(int)): The JSON object will be pretty-printed with this indent level.
An indent level of 0 or negative will only insert newlines.
Pass None for the most compact representation without newlines.
float_precision (Optional(int)): If set, use this to specify float field valid digits.
Default is None.
always_print_fields_with_no_presence (Optional(bool)): If True, fields without
presence (implicit presence scalars, repeated fields, and map fields) will
always be serialized. Any field that supports presence is not affected by
this option (including singular message fields and oneof fields).
This value must match `including_default_value_fields`,
if both arguments are explicitly set.
Returns:
str: The json string representation of the protocol buffer.
"""
print_fields = cls._normalize_print_fields_without_presence(
always_print_fields_with_no_presence, including_default_value_fields
)
if PROTOBUF_VERSION[0] in ("3", "4"):
return MessageToJson(
cls.pb(instance),
use_integers_for_enums=use_integers_for_enums,
including_default_value_fields=print_fields,
preserving_proto_field_name=preserving_proto_field_name,
sort_keys=sort_keys,
indent=indent,
float_precision=float_precision,
)
else:
# The `including_default_value_fields` argument was removed from protobuf 5.x
# and replaced with `always_print_fields_with_no_presence`, which is very similar but
# handles optional fields consistently by not affecting them.
# The old flag accidentally had inconsistent behavior between proto2
# optional and proto3 optional fields.
return MessageToJson(
cls.pb(instance),
use_integers_for_enums=use_integers_for_enums,
always_print_fields_with_no_presence=print_fields,
preserving_proto_field_name=preserving_proto_field_name,
sort_keys=sort_keys,
indent=indent,
float_precision=float_precision,
)
def from_json(cls, payload, *, ignore_unknown_fields=False) -> "Message":
"""Given a json string representing an instance,
parse it into a message.
Args:
payload: A json string representing a message.
ignore_unknown_fields (Optional(bool)): If True, do not raise errors
for unknown fields.
Returns:
~.Message: An instance of the message class against which this
method was called.
"""
instance = cls()
Parse(payload, instance._pb, ignore_unknown_fields=ignore_unknown_fields)
return instance
def to_dict(
cls,
instance,
*,
use_integers_for_enums=True,
preserving_proto_field_name=True,
including_default_value_fields=None,
float_precision=None,
always_print_fields_with_no_presence=None,
) -> Dict[str, Any]:
"""Given a message instance, return its representation as a python dict.
Args:
instance: An instance of this message type, or something
compatible (accepted by the type's constructor).
use_integers_for_enums (Optional(bool)): An option that determines whether enum
values should be represented by strings (False) or integers (True).
Default is True.
preserving_proto_field_name (Optional(bool)): An option that
determines whether field name representations preserve
proto case (snake_case) or use lowerCamelCase. Default is True.
including_default_value_fields (Optional(bool)): Deprecated. Use argument
`always_print_fields_with_no_presence` instead. An option that
determines whether the default field values should be included in the results.
This value must match `always_print_fields_with_no_presence`,
if both arguments are explicitly set.
float_precision (Optional(int)): If set, use this to specify float field valid digits.
Default is None.
always_print_fields_with_no_presence (Optional(bool)): If True, fields without
presence (implicit presence scalars, repeated fields, and map fields) will
always be serialized. Any field that supports presence is not affected by
this option (including singular message fields and oneof fields). This value
must match `including_default_value_fields`, if both arguments are explicitly set.
Returns:
dict: A representation of the protocol buffer using pythonic data structures.
Messages and map fields are represented as dicts,
repeated fields are represented as lists.
"""
print_fields = cls._normalize_print_fields_without_presence(
always_print_fields_with_no_presence, including_default_value_fields
)
if PROTOBUF_VERSION[0] in ("3", "4"):
return MessageToDict(
cls.pb(instance),
including_default_value_fields=print_fields,
preserving_proto_field_name=preserving_proto_field_name,
use_integers_for_enums=use_integers_for_enums,
float_precision=float_precision,
)
else:
# The `including_default_value_fields` argument was removed from protobuf 5.x
# and replaced with `always_print_fields_with_no_presence`, which is very similar but
# handles optional fields consistently by not affecting them.
# The old flag accidentally had inconsistent behavior between proto2
# optional and proto3 optional fields.
return MessageToDict(
cls.pb(instance),
always_print_fields_with_no_presence=print_fields,
preserving_proto_field_name=preserving_proto_field_name,
use_integers_for_enums=use_integers_for_enums,
float_precision=float_precision,
)
def copy_from(cls, instance, other):
"""Equivalent for protobuf.Message.CopyFrom
Args:
instance: An instance of this message type
other (Union[dict, ~.Message]):
A dictionary or message to reinitialize the values for this message.
"""
if isinstance(other, cls):
# Just want the underlying proto.
other = Message.pb(other)
elif isinstance(other, cls.pb()):
# Don't need to do anything.
pass
elif isinstance(other, collections.abc.Mapping):
# Coerce into a proto
other = cls._meta.pb(**other)
else:
raise TypeError(
"invalid argument type to copy to {}: {}".format(
cls.__name__, other.__class__.__name__
)
)
# Note: we can't just run self.__init__ because this may be a message field
# for a higher order proto; the memory layout for protos is NOT LIKE the
# python memory model. We cannot rely on just setting things by reference.
# Non-trivial complexity is (partially) hidden by the protobuf runtime.
cls.pb(instance).CopyFrom(other)
class Message(metaclass=MessageMeta):
"""The abstract base class for a message.
Args:
mapping (Union[dict, ~.Message]): A dictionary or message to be
used to determine the values for this message.
ignore_unknown_fields (Optional(bool)): If True, do not raise errors for
unknown fields. Only applied if `mapping` is a mapping type or there
are keyword parameters.
kwargs (dict): Keys and values corresponding to the fields of the
message.
"""
def __init__(
self,
mapping=None,
*,
ignore_unknown_fields=False,
**kwargs,
):
# We accept several things for `mapping`:
# * An instance of this class.
# * An instance of the underlying protobuf descriptor class.
# * A dict
# * Nothing (keyword arguments only).
if mapping is None:
if not kwargs:
# Special fast path for empty construction.
super().__setattr__("_pb", self._meta.pb())
return
mapping = kwargs
elif isinstance(mapping, self._meta.pb):
# Make a copy of the mapping.
# This is a constructor for a new object, so users will assume
# that it will not have side effects on the arguments being
# passed in.
#
# The `wrap` method on the metaclass is the public API for taking
# ownership of the passed in protobuf object.
mapping = copy.deepcopy(mapping)
if kwargs:
mapping.MergeFrom(self._meta.pb(**kwargs))
super().__setattr__("_pb", mapping)
return
elif isinstance(mapping, type(self)):
# Just use the above logic on mapping's underlying pb.
self.__init__(mapping=mapping._pb, **kwargs)
return
elif isinstance(mapping, collections.abc.Mapping):
# Can't have side effects on mapping.
mapping = copy.copy(mapping)
# kwargs entries take priority for duplicate keys.
mapping.update(kwargs)
else:
# Sanity check: Did we get something not a map? Error if so.
raise TypeError(
"Invalid constructor input for %s: %r"
% (
self.__class__.__name__,
mapping,
)
)
params = {}
# Update the mapping to address any values that need to be
# coerced.
marshal = self._meta.marshal
for key, value in mapping.items():
(key, pb_type) = self._get_pb_type_from_key(key)
if pb_type is None:
if ignore_unknown_fields:
continue
raise ValueError(
"Unknown field for {}: {}".format(self.__class__.__name__, key)
)
pb_value = marshal.to_proto(pb_type, value)
if pb_value is not None:
params[key] = pb_value
# Create the internal protocol buffer.
super().__setattr__("_pb", self._meta.pb(**params))
def _get_pb_type_from_key(self, key):
"""Given a key, return the corresponding pb_type.
Args:
key (str): The name of the field.
Returns:
A tuple of the key and the pb_type. The pb_type is the composite
type of the field, or the primitive type if the field is a primitive.
If no corresponding field exists, the pb_type is None.
"""
pb_type = None
try:
pb_type = self._meta.fields[key].pb_type
except KeyError:
# Underscores may be appended to field names
# that collide with python or proto-plus keywords.
# In case a key only exists with a `_` suffix, coerce the key
# to include the `_` suffix. It's not possible to
# natively define the same field with a trailing underscore in protobuf.
# See related issue
# https://github.com/googleapis/python-api-core/issues/227
if f"{key}_" in self._meta.fields:
key = f"{key}_"
pb_type = self._meta.fields[key].pb_type
return (key, pb_type)
def __dir__(self):
desc = type(self).pb().DESCRIPTOR
names = {f_name for f_name in self._meta.fields.keys()}
names.update(m.name for m in desc.nested_types)
names.update(e.name for e in desc.enum_types)
names.update(dir(object()))
# Can't think of a better way of determining
# the special methods than manually listing them.
names.update(
(
"__bool__",
"__contains__",
"__dict__",
"__getattr__",
"__getstate__",
"__module__",
"__setstate__",
"__weakref__",
)
)
return names
def __bool__(self):
"""Return True if any field is truthy, False otherwise."""
return any(k in self and getattr(self, k) for k in self._meta.fields.keys())
def __contains__(self, key):
"""Return True if this field was set to something non-zero on the wire.
In most cases, this method will return True when ``__getattr__``
would return a truthy value and False when it would return a falsy
value, so explicitly calling this is not useful.
The exception case is empty messages explicitly set on the wire,
which are falsy from ``__getattr__``. This method allows you to
distinguish between an explicitly provided empty message and the
absence of that message, which is useful in some edge cases.
The most common edge case is the use of ``google.protobuf.BoolValue``
to get a boolean that distinguishes between ``False`` and ``None``
(or the same for a string, int, etc.). This library transparently
handles that case for you, but this method remains available to
accommodate cases not automatically covered.
Args:
key (str): The name of the field.
Returns:
bool: Whether the field's value corresponds to a non-empty
wire serialization.
"""
pb_value = getattr(self._pb, key)
try:
# Protocol buffers "HasField" is unfriendly; it only works
# against composite, non-repeated fields, and raises ValueError
# against any repeated field or primitive.
#
# There is no good way to test whether it is valid to provide
# a field to this method, so sadly we are stuck with a
# somewhat inefficient try/except.
return self._pb.HasField(key)
except ValueError:
return bool(pb_value)
def __delattr__(self, key):
"""Delete the value on the given field.
This is generally equivalent to setting a falsy value.
"""
self._pb.ClearField(key)
def __eq__(self, other):
"""Return True if the messages are equal, False otherwise."""
# If these are the same type, use internal protobuf's equality check.
if isinstance(other, type(self)):
return self._pb == other._pb
# If the other type is the target protobuf object, honor that also.
if isinstance(other, self._meta.pb):
return self._pb == other
# Ask the other object.
return NotImplemented
def __getattr__(self, key):
"""Retrieve the given field's value.
In protocol buffers, the presence of a field on a message is
sufficient for it to always be "present".
For primitives, a value of the correct type will always be returned
(the "falsy" values in protocol buffers consistently match those
in Python). For repeated fields, the falsy value is always an empty
sequence.
For messages, protocol buffers does distinguish between an empty
message and absence, but this distinction is subtle and rarely
relevant. Therefore, this method always returns an empty message
(following the official implementation). To check for message
presence, use ``key in self`` (in other words, ``__contains__``).
.. note::
Some well-known protocol buffer types
(e.g. ``google.protobuf.Timestamp``) will be converted to
their Python equivalents. See the ``marshal`` module for
more details.
"""
(key, pb_type) = self._get_pb_type_from_key(key)
if pb_type is None:
raise AttributeError(
"Unknown field for {}: {}".format(self.__class__.__name__, key)
)
pb_value = getattr(self._pb, key)
marshal = self._meta.marshal
return marshal.to_python(pb_type, pb_value, absent=key not in self)
def __ne__(self, other):
"""Return True if the messages are unequal, False otherwise."""
return not self == other
def __repr__(self):
return repr(self._pb)
def __setattr__(self, key, value):
"""Set the value on the given field.
For well-known protocol buffer types which are marshalled, either
the protocol buffer object or the Python equivalent is accepted.
"""
if key[0] == "_":
return super().__setattr__(key, value)
marshal = self._meta.marshal
(key, pb_type) = self._get_pb_type_from_key(key)
if pb_type is None:
raise AttributeError(
"Unknown field for {}: {}".format(self.__class__.__name__, key)
)
pb_value = marshal.to_proto(pb_type, value)
# Clear the existing field.
# This is the only way to successfully write nested falsy values,
# because otherwise MergeFrom will no-op on them.
self._pb.ClearField(key)
# Merge in the value being set.
if pb_value is not None:
self._pb.MergeFrom(self._meta.pb(**{key: pb_value}))
def __getstate__(self):
"""Serialize for pickling."""
return self._pb.SerializeToString()
def __setstate__(self, value):
"""Deserialization for pickling."""
new_pb = self._meta.pb().FromString(value)
super().__setattr__("_pb", new_pb)
class _MessageInfo:
"""Metadata about a message.
Args:
fields (Tuple[~.fields.Field]): The fields declared on the message.
package (str): The proto package.
full_name (str): The full name of the message.
file_info (~._FileInfo): The file descriptor and messages for the
file containing this message.
marshal (~.Marshal): The marshal instance to which this message was
automatically registered.
options (~.descriptor_pb2.MessageOptions): Any options that were
set on the message.
"""
def __init__(
self,
*,
fields: List[Field],
package: str,
full_name: str,
marshal: Marshal,
options: descriptor_pb2.MessageOptions,
) -> None:
self.package = package
self.full_name = full_name
self.options = options
self.fields = collections.OrderedDict((i.name, i) for i in fields)
self.fields_by_number = collections.OrderedDict((i.number, i) for i in fields)
self.marshal = marshal
self._pb = None
@property
def pb(self) -> Type[message.Message]:
"""Return the protobuf message type for this descriptor.
If a field on the message references another message which has not
yet been loaded, this property returns None.
"""
return self._pb
__all__ = ("Message",)

View File

@@ -0,0 +1,50 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Set
import collections
_ProtoModule = collections.namedtuple(
"ProtoModule",
["package", "marshal", "manifest"],
)
def define_module(
*, package: str, marshal: str = None, manifest: Set[str] = frozenset()
) -> _ProtoModule:
"""Define a protocol buffers module.
The settings defined here are used for all protobuf messages
declared in the module of the given name.
Args:
package (str): The proto package name.
marshal (str): The name of the marshal to use. It is recommended
to use one marshal per Python library (e.g. package on PyPI).
manifest (Set[str]): A set of messages and enums to be created. Setting
this adds a slight efficiency in piecing together proto
descriptors under the hood.
"""
if not marshal:
marshal = package
return _ProtoModule(
package=package,
marshal=marshal,
manifest=frozenset(manifest),
)
__all__ = ("define_module",)

View File

@@ -0,0 +1,38 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
class ProtoType(enum.IntEnum):
"""The set of basic types in protocol buffers."""
# These values come from google/protobuf/descriptor.proto
DOUBLE = 1
FLOAT = 2
INT64 = 3
UINT64 = 4
INT32 = 5
FIXED64 = 6
FIXED32 = 7
BOOL = 8
STRING = 9
MESSAGE = 11
BYTES = 12
UINT32 = 13
ENUM = 14
SFIXED32 = 15
SFIXED64 = 16
SINT32 = 17
SINT64 = 18
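
These values deliberately mirror ``FieldDescriptorProto.Type`` from ``google/protobuf/descriptor.proto``, so they can be compared and passed interchangeably with the descriptor enum. A quick check:

from google.protobuf import descriptor_pb2
from proto.primitives import ProtoType

assert ProtoType.INT64 == descriptor_pb2.FieldDescriptorProto.TYPE_INT64 == 3
assert ProtoType.MESSAGE == descriptor_pb2.FieldDescriptorProto.TYPE_MESSAGE == 11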

View File

@@ -0,0 +1,58 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
def has_upb():
try:
from google._upb import _message # pylint: disable=unused-import
has_upb = True
except ImportError:
has_upb = False
return has_upb
def cached_property(fx):
"""Make the callable into a cached property.
Similar to @property, but the function will only be called once per
object.
Args:
fx (Callable[]): The property function.
Returns:
Callable[]: The wrapped function.
"""
@functools.wraps(fx)
def inner(self):
# Sanity check: If there is no cache at all, create an empty cache.
if not hasattr(self, "_cached_values"):
object.__setattr__(self, "_cached_values", {})
# If and only if the function's result is not in the cache,
# run the function.
if fx.__name__ not in self._cached_values:
self._cached_values[fx.__name__] = fx(self)
# Return the value from cache.
return self._cached_values[fx.__name__]
return property(inner)
__all__ = ("cached_property",)

View File

@@ -0,0 +1,15 @@
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__version__ = "1.26.1"