# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: mediapipe/calculators/video/tool/flow_quantizer_model.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n;mediapipe/calculators/video/tool/flow_quantizer_model.proto\x12\tmediapipe\":\n\x12QuantizerModelData\x12\x11\n\tmin_value\x18\x01 \x03(\x02\x12\x11\n\tmax_value\x18\x02 \x03(\x02')
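# For readability only (not emitted by protoc): the serialized file descriptor
# above encodes, reconstructed from the bytes, a schema equivalent to:
#
#   package mediapipe;
#   message QuantizerModelData {
#     repeated float min_value = 1;
#     repeated float max_value = 2;
#   }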



_QUANTIZERMODELDATA = DESCRIPTOR.message_types_by_name['QuantizerModelData']
QuantizerModelData = _reflection.GeneratedProtocolMessageType('QuantizerModelData', (_message.Message,), {
  'DESCRIPTOR' : _QUANTIZERMODELDATA,
  '__module__' : 'mediapipe.calculators.video.tool.flow_quantizer_model_pb2'
  # @@protoc_insertion_point(class_scope:mediapipe.QuantizerModelData)
  })
_sym_db.RegisterMessage(QuantizerModelData)

if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  _QUANTIZERMODELDATA._serialized_start=74
  _QUANTIZERMODELDATA._serialized_end=132
# @@protoc_insertion_point(module_scope)
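
# --- Usage sketch (editor addition, not generated by protoc) -----------------
# A minimal, hedged example of how the QuantizerModelData message defined above
# could be constructed and round-tripped through serialization. The numeric
# values and the number of repeated entries are illustrative placeholders, not
# values taken from MediaPipe. Guarded by __main__ so importing this module is
# unaffected.
if __name__ == '__main__':
  model = QuantizerModelData()
  # Both fields are repeated floats; two entries are used here purely as an
  # illustration.
  model.min_value.extend([-20.0, -20.0])
  model.max_value.extend([20.0, 20.0])

  # Serialize to bytes and parse back, as any generated message supports.
  payload = model.SerializeToString()
  restored = QuantizerModelData.FromString(payload)
  assert list(restored.min_value) == [-20.0, -20.0]
  print(restored)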
