Updates the Evo AI project structure, adding new seeder scripts to create initial data, including users, agents, clients, and tools. Implements authentication and audit routes, and configures the JWT middleware. Updates the environment settings and the README to reflect the changes. Adds new dependencies for authentication and e-mail sending.
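For context on the JWT middleware mentioned above, bearer-token verification in a Python web stack typically looks like the sketch below. This is an illustrative example only, not code from this commit: the FastAPI/python-jose stack and every name in it (SECRET_KEY, ALGORITHM, oauth2_scheme, get_current_user) are assumptions.

# Illustrative sketch only -- not part of this commit. Assumes FastAPI and
# python-jose; all names below are hypothetical.
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from jose import JWTError, jwt

SECRET_KEY = "change-me"   # hypothetical; normally loaded from the environment
ALGORITHM = "HS256"

oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")

def get_current_user(token: str = Depends(oauth2_scheme)) -> str:
    # Decode the bearer token and return its subject, rejecting invalid tokens.
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        username = payload.get("sub")
        if username is None:
            raise JWTError("missing subject claim")
        return username
    except JWTError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
        )

A protected route would then simply declare a parameter such as user: str = Depends(get_current_user).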
File diff suppressed because it is too large.
[Oversized split-view diff of the bundled pyasn1 dependency; only a hunk-level summary is recoverable. Module names are inferred from their contents; the hunks appear to revert pyasn1 from its newer 0.5.x-style code to an older 0.4.x-style copy.]

- pyasn1/codec/ber/encoder.py: module-level TAG_MAP/TYPE_MAP renamed back to tagMap/typeMap; bytes literals and bytes(...) calls replaced with the pyasn1.compat.octets helpers (null, int2oct, oct2int, ints2octs, str2octs, isOctetsType); the SingleItemEncoder/Encoder split collapsed back into a single Encoder(tagMap, typeMap); RelativeOIDEncoder and the __getattr__ deprecation shim dropped; copyright header rolled back from 2005-2020 to 2005-2019.
- pyasn1/codec/ber/eoo.py: copyright/license header rolled back only.
- pyasn1/codec/cer/decoder.py: BooleanPayloadDecoder renamed back to BooleanDecoder; stream-based decoding (readFromStream, SingleItemDecoder, StreamingDecoder) removed; TAG_MAP/TYPE_MAP renamed back to tagMap/typeMap; decode rebuilt as Decoder(tagMap, decoder.typeMap).
- pyasn1/codec/cer/encoder.py: octets helpers (str2octs, null) restored; SingleItemEncoder removed; maps renamed back; encode rebuilt as Encoder(tagMap, typeMap).
- pyasn1/codec/der/decoder.py: *PayloadDecoder classes renamed back to *Decoder; SingleItemDecoder/StreamingDecoder removed; maps renamed back; decode rebuilt as Decoder(tagMap, typeMap).
- pyasn1/codec/der/encoder.py: SingleItemEncoder removed; maps renamed back; encode rebuilt as Encoder(tagMap, typeMap).
- pyasn1/codec/native/decoder.py: *PayloadDecoder classes renamed back to *Decoder; univ.RelativeOID entries dropped; SingleItemDecoder/Decoder split collapsed; maps renamed back; decode rebuilt as Decoder(tagMap, typeMap).
- pyasn1/codec/native/encoder.py: RelativeOIDEncoder removed; SingleItemEncoder folded back into Encoder; the OrderedDict import wrapped in a try/except fallback; maps renamed back; encode rebuilt as Encoder(tagMap, typeMap).
- pyasn1/codec/streaming.py: deleted entirely (CachingStreamWrapper, asSeekableStream, isEndOfStream, peekIntoStream, readFromStream).