From 8fda0bc62ef55ef4aecfa3c7cd043357ad26b095 Mon Sep 17 00:00:00 2001
From: Quacky
Date: Sat, 22 Jul 2017 22:54:59 -0500
Subject: [PATCH] Add in bot; personalize for RBXLegacy later

Oh, also: it's pre-installed with all requirements :smiley:
---
 RBXLegacyDiscordBot/.gitignore | 8 +
 RBXLegacyDiscordBot/.travis.yml | 12 +
 RBXLegacyDiscordBot/cogs/alias.py | 191 +
 RBXLegacyDiscordBot/cogs/audio.py | 2251 ++
 RBXLegacyDiscordBot/cogs/customcom.py | 200 +
 RBXLegacyDiscordBot/cogs/downloader.py | 693 +
 RBXLegacyDiscordBot/cogs/economy.py | 736 +
 RBXLegacyDiscordBot/cogs/general.py | 433 +
 RBXLegacyDiscordBot/cogs/image.py | 168 +
 RBXLegacyDiscordBot/cogs/mod.py | 1720 ++
 RBXLegacyDiscordBot/cogs/owner.py | 1095 +
 RBXLegacyDiscordBot/cogs/streams.py | 689 +
 RBXLegacyDiscordBot/cogs/trivia.py | 332 +
 RBXLegacyDiscordBot/cogs/utils/__init__.py | 0
 .../cogs/utils/chat_formatting.py | 80 +
 RBXLegacyDiscordBot/cogs/utils/checks.py | 88 +
 RBXLegacyDiscordBot/cogs/utils/dataIO.py | 79 +
 RBXLegacyDiscordBot/cogs/utils/settings.py | 291 +
 RBXLegacyDiscordBot/launcher.py | 573 +
 .../PyNaCl-1.0.1.dist-info/DESCRIPTION.rst | 91 +
 .../lib/PyNaCl-1.0.1.dist-info/INSTALLER | 1 +
 .../lib/PyNaCl-1.0.1.dist-info/METADATA | 114 +
 .../lib/PyNaCl-1.0.1.dist-info/RECORD | 40 +
 .../lib/PyNaCl-1.0.1.dist-info/WHEEL | 5 +
 .../lib/PyNaCl-1.0.1.dist-info/top_level.txt | 2 +
 .../lib/_cffi_backend.cp36-win32.pyd | Bin 0 -> 128512 bytes
 .../lib/aiohttp-1.0.5-py3.6.egg-info/PKG-INFO | 352 +
 .../aiohttp-1.0.5-py3.6.egg-info/SOURCES.txt | 162 +
 .../dependency_links.txt | 1 +
 .../installed-files.txt | 66 +
 .../aiohttp-1.0.5-py3.6.egg-info/requires.txt | 3 +
 .../top_level.txt | 1 +
 RBXLegacyDiscordBot/lib/aiohttp/__init__.py | 41 +
 RBXLegacyDiscordBot/lib/aiohttp/_websocket.c | 2414 ++
 .../lib/aiohttp/_websocket.cp36-win32.pyd | Bin 0 -> 18432 bytes
 .../lib/aiohttp/_websocket.pyx | 48 +
 RBXLegacyDiscordBot/lib/aiohttp/_ws_impl.py | 438 +
 RBXLegacyDiscordBot/lib/aiohttp/abc.py | 88 +
 RBXLegacyDiscordBot/lib/aiohttp/client.py | 786 +
 .../lib/aiohttp/client_reqrep.py | 801 +
 RBXLegacyDiscordBot/lib/aiohttp/client_ws.py | 193 +
 RBXLegacyDiscordBot/lib/aiohttp/connector.py | 783 +
 RBXLegacyDiscordBot/lib/aiohttp/cookiejar.py | 290 +
 RBXLegacyDiscordBot/lib/aiohttp/errors.py | 186 +
 .../lib/aiohttp/file_sender.py | 168 +
 RBXLegacyDiscordBot/lib/aiohttp/hdrs.py | 91 +
 RBXLegacyDiscordBot/lib/aiohttp/helpers.py | 534 +
 RBXLegacyDiscordBot/lib/aiohttp/log.py | 8 +
 RBXLegacyDiscordBot/lib/aiohttp/multipart.py | 973 +
 RBXLegacyDiscordBot/lib/aiohttp/parsers.py | 495 +
 RBXLegacyDiscordBot/lib/aiohttp/protocol.py | 916 +
 .../lib/aiohttp/pytest_plugin.py | 113 +
 RBXLegacyDiscordBot/lib/aiohttp/resolver.py | 100 +
 RBXLegacyDiscordBot/lib/aiohttp/server.py | 376 +
 RBXLegacyDiscordBot/lib/aiohttp/signals.py | 71 +
 RBXLegacyDiscordBot/lib/aiohttp/streams.py | 672 +
 RBXLegacyDiscordBot/lib/aiohttp/test_utils.py | 485 +
 RBXLegacyDiscordBot/lib/aiohttp/web.py | 376 +
 .../lib/aiohttp/web_exceptions.py | 349 +
 RBXLegacyDiscordBot/lib/aiohttp/web_reqrep.py | 895 +
 .../lib/aiohttp/web_urldispatcher.py | 825 +
 RBXLegacyDiscordBot/lib/aiohttp/web_ws.py | 320 +
 RBXLegacyDiscordBot/lib/aiohttp/worker.py | 195 +
 RBXLegacyDiscordBot/lib/aiohttp/wsgi.py | 235 +
 .../DESCRIPTION.rst | 76 +
 .../async_timeout-1.2.1.dist-info/INSTALLER | 1 +
 .../async_timeout-1.2.1.dist-info/METADATA | 95 +
 .../lib/async_timeout-1.2.1.dist-info/RECORD | 9 +
 .../lib/async_timeout-1.2.1.dist-info/WHEEL | 5 +
 .../top_level.txt | 1 +
 .../lib/async_timeout/__init__.py | 62 +
 .../DESCRIPTION.rst 
| 49 + .../lib/certifi-2017.4.17.dist-info/INSTALLER | 1 + .../lib/certifi-2017.4.17.dist-info/METADATA | 68 + .../lib/certifi-2017.4.17.dist-info/RECORD | 16 + .../lib/certifi-2017.4.17.dist-info/WHEEL | 6 + .../certifi-2017.4.17.dist-info/top_level.txt | 1 + RBXLegacyDiscordBot/lib/certifi/__init__.py | 3 + RBXLegacyDiscordBot/lib/certifi/__main__.py | 2 + RBXLegacyDiscordBot/lib/certifi/cacert.pem | 5246 +++++ RBXLegacyDiscordBot/lib/certifi/core.py | 36 + RBXLegacyDiscordBot/lib/certifi/old_root.pem | 414 + RBXLegacyDiscordBot/lib/certifi/weak.pem | 5660 +++++ .../lib/cffi-1.10.0.dist-info/DESCRIPTION.rst | 13 + .../lib/cffi-1.10.0.dist-info/INSTALLER | 1 + .../lib/cffi-1.10.0.dist-info/METADATA | 35 + .../lib/cffi-1.10.0.dist-info/RECORD | 42 + .../lib/cffi-1.10.0.dist-info/WHEEL | 5 + .../cffi-1.10.0.dist-info/entry_points.txt | 3 + .../lib/cffi-1.10.0.dist-info/top_level.txt | 2 + RBXLegacyDiscordBot/lib/cffi/__init__.py | 13 + RBXLegacyDiscordBot/lib/cffi/_cffi_include.h | 253 + RBXLegacyDiscordBot/lib/cffi/_embedding.h | 517 + RBXLegacyDiscordBot/lib/cffi/api.py | 916 + .../lib/cffi/backend_ctypes.py | 1114 + RBXLegacyDiscordBot/lib/cffi/cffi_opcode.py | 179 + RBXLegacyDiscordBot/lib/cffi/commontypes.py | 80 + RBXLegacyDiscordBot/lib/cffi/cparser.py | 876 + RBXLegacyDiscordBot/lib/cffi/error.py | 20 + RBXLegacyDiscordBot/lib/cffi/ffiplatform.py | 115 + RBXLegacyDiscordBot/lib/cffi/lock.py | 30 + RBXLegacyDiscordBot/lib/cffi/model.py | 601 + RBXLegacyDiscordBot/lib/cffi/parse_c_type.h | 177 + RBXLegacyDiscordBot/lib/cffi/recompiler.py | 1524 ++ .../lib/cffi/setuptools_ext.py | 188 + RBXLegacyDiscordBot/lib/cffi/vengine_cpy.py | 1011 + RBXLegacyDiscordBot/lib/cffi/vengine_gen.py | 672 + RBXLegacyDiscordBot/lib/cffi/verifier.py | 317 + .../chardet-3.0.4.dist-info/DESCRIPTION.rst | 70 + .../lib/chardet-3.0.4.dist-info/INSTALLER | 1 + .../lib/chardet-3.0.4.dist-info/METADATA | 96 + .../lib/chardet-3.0.4.dist-info/RECORD | 91 + .../lib/chardet-3.0.4.dist-info/WHEEL | 6 + .../chardet-3.0.4.dist-info/entry_points.txt | 3 + .../lib/chardet-3.0.4.dist-info/top_level.txt | 1 + RBXLegacyDiscordBot/lib/chardet/__init__.py | 39 + RBXLegacyDiscordBot/lib/chardet/big5freq.py | 386 + RBXLegacyDiscordBot/lib/chardet/big5prober.py | 47 + .../lib/chardet/chardistribution.py | 233 + .../lib/chardet/charsetgroupprober.py | 106 + .../lib/chardet/charsetprober.py | 145 + .../lib/chardet/cli/__init__.py | 1 + .../lib/chardet/cli/chardetect.py | 85 + .../lib/chardet/codingstatemachine.py | 88 + RBXLegacyDiscordBot/lib/chardet/compat.py | 34 + .../lib/chardet/cp949prober.py | 49 + RBXLegacyDiscordBot/lib/chardet/enums.py | 76 + RBXLegacyDiscordBot/lib/chardet/escprober.py | 101 + RBXLegacyDiscordBot/lib/chardet/escsm.py | 246 + .../lib/chardet/eucjpprober.py | 92 + RBXLegacyDiscordBot/lib/chardet/euckrfreq.py | 195 + .../lib/chardet/euckrprober.py | 47 + RBXLegacyDiscordBot/lib/chardet/euctwfreq.py | 387 + .../lib/chardet/euctwprober.py | 46 + RBXLegacyDiscordBot/lib/chardet/gb2312freq.py | 283 + .../lib/chardet/gb2312prober.py | 46 + .../lib/chardet/hebrewprober.py | 292 + RBXLegacyDiscordBot/lib/chardet/jisfreq.py | 325 + RBXLegacyDiscordBot/lib/chardet/jpcntx.py | 233 + .../lib/chardet/langbulgarianmodel.py | 228 + .../lib/chardet/langcyrillicmodel.py | 333 + .../lib/chardet/langgreekmodel.py | 225 + .../lib/chardet/langhebrewmodel.py | 200 + .../lib/chardet/langhungarianmodel.py | 225 + .../lib/chardet/langthaimodel.py | 199 + .../lib/chardet/langturkishmodel.py | 193 + .../lib/chardet/latin1prober.py | 
145 + .../lib/chardet/mbcharsetprober.py | 91 + .../lib/chardet/mbcsgroupprober.py | 54 + RBXLegacyDiscordBot/lib/chardet/mbcssm.py | 572 + .../lib/chardet/sbcharsetprober.py | 132 + .../lib/chardet/sbcsgroupprober.py | 73 + RBXLegacyDiscordBot/lib/chardet/sjisprober.py | 92 + .../lib/chardet/universaldetector.py | 286 + RBXLegacyDiscordBot/lib/chardet/utf8prober.py | 82 + RBXLegacyDiscordBot/lib/chardet/version.py | 9 + .../discord.py-0.16.8-py3.6.egg-info/PKG-INFO | 128 + .../SOURCES.txt | 49 + .../dependency_links.txt | 1 + .../installed-files.txt | 83 + .../requires.txt | 5 + .../top_level.txt | 1 + RBXLegacyDiscordBot/lib/discord/__init__.py | 55 + RBXLegacyDiscordBot/lib/discord/calls.py | 156 + RBXLegacyDiscordBot/lib/discord/channel.py | 446 + RBXLegacyDiscordBot/lib/discord/client.py | 3319 +++ RBXLegacyDiscordBot/lib/discord/colour.py | 198 + RBXLegacyDiscordBot/lib/discord/compat.py | 131 + RBXLegacyDiscordBot/lib/discord/embeds.py | 475 + RBXLegacyDiscordBot/lib/discord/emoji.py | 107 + RBXLegacyDiscordBot/lib/discord/enums.py | 107 + RBXLegacyDiscordBot/lib/discord/errors.py | 125 + .../lib/discord/ext/__init__.py | 12 + .../lib/discord/ext/commands/__init__.py | 19 + .../lib/discord/ext/commands/bot.py | 857 + .../lib/discord/ext/commands/context.py | 121 + .../lib/discord/ext/commands/converter.py | 202 + .../lib/discord/ext/commands/cooldowns.py | 126 + .../lib/discord/ext/commands/core.py | 943 + .../lib/discord/ext/commands/errors.py | 127 + .../lib/discord/ext/commands/formatter.py | 352 + .../lib/discord/ext/commands/view.py | 167 + RBXLegacyDiscordBot/lib/discord/game.py | 80 + RBXLegacyDiscordBot/lib/discord/gateway.py | 688 + RBXLegacyDiscordBot/lib/discord/http.py | 648 + RBXLegacyDiscordBot/lib/discord/invite.py | 109 + RBXLegacyDiscordBot/lib/discord/iterators.py | 178 + RBXLegacyDiscordBot/lib/discord/member.py | 229 + RBXLegacyDiscordBot/lib/discord/message.py | 330 + RBXLegacyDiscordBot/lib/discord/mixins.py | 42 + RBXLegacyDiscordBot/lib/discord/object.py | 54 + RBXLegacyDiscordBot/lib/discord/opus.py | 277 + .../lib/discord/permissions.py | 597 + RBXLegacyDiscordBot/lib/discord/reaction.py | 80 + RBXLegacyDiscordBot/lib/discord/role.py | 145 + RBXLegacyDiscordBot/lib/discord/server.py | 341 + RBXLegacyDiscordBot/lib/discord/state.py | 739 + RBXLegacyDiscordBot/lib/discord/user.py | 158 + RBXLegacyDiscordBot/lib/discord/utils.py | 253 + .../lib/discord/voice_client.py | 695 + .../lib/idna-2.5.dist-info/DESCRIPTION.rst | 178 + .../lib/idna-2.5.dist-info/INSTALLER | 1 + .../lib/idna-2.5.dist-info/METADATA | 204 + .../lib/idna-2.5.dist-info/RECORD | 22 + .../lib/idna-2.5.dist-info/WHEEL | 6 + .../lib/idna-2.5.dist-info/top_level.txt | 1 + RBXLegacyDiscordBot/lib/idna/__init__.py | 1 + RBXLegacyDiscordBot/lib/idna/codec.py | 118 + RBXLegacyDiscordBot/lib/idna/compat.py | 12 + RBXLegacyDiscordBot/lib/idna/core.py | 387 + RBXLegacyDiscordBot/lib/idna/idnadata.py | 1584 ++ RBXLegacyDiscordBot/lib/idna/intranges.py | 53 + RBXLegacyDiscordBot/lib/idna/uts46data.py | 7633 ++++++ .../imgurpython-1.1.7-py3.6.egg-info/PKG-INFO | 19 + .../SOURCES.txt | 27 + .../dependency_links.txt | 1 + .../installed-files.txt | 45 + .../requires.txt | 1 + .../top_level.txt | 1 + .../lib/imgurpython/__init__.py | 1 + RBXLegacyDiscordBot/lib/imgurpython/client.py | 681 + .../lib/imgurpython/helpers/__init__.py | 4 + .../lib/imgurpython/helpers/error.py | 15 + .../lib/imgurpython/helpers/format.py | 83 + .../lib/imgurpython/imgur/__init__.py | 0 .../lib/imgurpython/imgur/models/__init__.py | 
0 .../lib/imgurpython/imgur/models/account.py | 9 + .../imgur/models/account_settings.py | 13 + .../lib/imgurpython/imgur/models/album.py | 9 + .../lib/imgurpython/imgur/models/comment.py | 9 + .../imgurpython/imgur/models/conversation.py | 27 + .../imgur/models/custom_gallery.py | 16 + .../imgurpython/imgur/models/gallery_album.py | 9 + .../imgurpython/imgur/models/gallery_image.py | 9 + .../lib/imgurpython/imgur/models/image.py | 9 + .../lib/imgurpython/imgur/models/message.py | 10 + .../imgurpython/imgur/models/notification.py | 7 + .../lib/imgurpython/imgur/models/tag.py | 13 + .../lib/imgurpython/imgur/models/tag_vote.py | 7 + .../multidict-3.1.3.dist-info/DESCRIPTION.rst | 214 + .../lib/multidict-3.1.3.dist-info/INSTALLER | 1 + .../lib/multidict-3.1.3.dist-info/METADATA | 231 + .../lib/multidict-3.1.3.dist-info/RECORD | 17 + .../lib/multidict-3.1.3.dist-info/WHEEL | 5 + .../multidict-3.1.3.dist-info/top_level.txt | 1 + RBXLegacyDiscordBot/lib/multidict/__init__.py | 34 + .../lib/multidict/__init__.pyi | 69 + RBXLegacyDiscordBot/lib/multidict/_istr.c | 238 + .../lib/multidict/_istr.cp36-win32.pyd | Bin 0 -> 10240 bytes .../lib/multidict/_multidict.c | 19252 ++++++++++++++++ .../lib/multidict/_multidict.cp36-win32.pyd | Bin 0 -> 117248 bytes .../lib/multidict/_multidict.pyx | 811 + .../lib/multidict/_multidict_py.py | 449 + RBXLegacyDiscordBot/lib/nacl/__init__.py | 33 + .../lib/nacl/_sodium.cp36-win32.pyd | Bin 0 -> 183296 bytes .../lib/nacl/bindings/__init__.py | 97 + .../lib/nacl/bindings/crypto_box.py | 180 + .../lib/nacl/bindings/crypto_hash.py | 62 + .../lib/nacl/bindings/crypto_scalarmult.py | 54 + .../lib/nacl/bindings/crypto_secretbox.py | 77 + .../lib/nacl/bindings/crypto_sign.py | 152 + .../lib/nacl/bindings/randombytes.py | 30 + .../lib/nacl/bindings/sodium_core.py | 26 + RBXLegacyDiscordBot/lib/nacl/encoding.py | 90 + RBXLegacyDiscordBot/lib/nacl/exceptions.py | 27 + RBXLegacyDiscordBot/lib/nacl/hash.py | 26 + RBXLegacyDiscordBot/lib/nacl/public.py | 204 + RBXLegacyDiscordBot/lib/nacl/secret.py | 121 + RBXLegacyDiscordBot/lib/nacl/signing.py | 195 + RBXLegacyDiscordBot/lib/nacl/utils.py | 60 + .../lib/pip-9.0.1.dist-info/DESCRIPTION.rst | 39 + .../lib/pip-9.0.1.dist-info/INSTALLER | 1 + .../lib/pip-9.0.1.dist-info/METADATA | 69 + .../lib/pip-9.0.1.dist-info/RECORD | 501 + .../lib/pip-9.0.1.dist-info/WHEEL | 6 + .../lib/pip-9.0.1.dist-info/entry_points.txt | 5 + .../lib/pip-9.0.1.dist-info/top_level.txt | 1 + RBXLegacyDiscordBot/lib/pip/__init__.py | 331 + RBXLegacyDiscordBot/lib/pip/__main__.py | 19 + .../lib/pip/_vendor/__init__.py | 107 + .../lib/pip/_vendor/appdirs.py | 552 + .../lib/pip/_vendor/cachecontrol/__init__.py | 11 + .../lib/pip/_vendor/cachecontrol/_cmd.py | 60 + .../lib/pip/_vendor/cachecontrol/adapter.py | 125 + .../lib/pip/_vendor/cachecontrol/cache.py | 39 + .../_vendor/cachecontrol/caches/__init__.py | 18 + .../_vendor/cachecontrol/caches/file_cache.py | 116 + .../cachecontrol/caches/redis_cache.py | 41 + .../lib/pip/_vendor/cachecontrol/compat.py | 20 + .../pip/_vendor/cachecontrol/controller.py | 353 + .../pip/_vendor/cachecontrol/filewrapper.py | 78 + .../pip/_vendor/cachecontrol/heuristics.py | 138 + .../lib/pip/_vendor/cachecontrol/serialize.py | 196 + .../lib/pip/_vendor/cachecontrol/wrapper.py | 21 + .../lib/pip/_vendor/colorama/__init__.py | 7 + .../lib/pip/_vendor/colorama/ansi.py | 102 + .../lib/pip/_vendor/colorama/ansitowin32.py | 236 + .../lib/pip/_vendor/colorama/initialise.py | 82 + .../lib/pip/_vendor/colorama/win32.py | 154 + 
.../lib/pip/_vendor/colorama/winterm.py | 162 + .../lib/pip/_vendor/distlib/__init__.py | 23 + .../pip/_vendor/distlib/_backport/__init__.py | 6 + .../lib/pip/_vendor/distlib/_backport/misc.py | 41 + .../pip/_vendor/distlib/_backport/shutil.py | 761 + .../_vendor/distlib/_backport/sysconfig.cfg | 84 + .../_vendor/distlib/_backport/sysconfig.py | 788 + .../pip/_vendor/distlib/_backport/tarfile.py | 2607 +++ .../lib/pip/_vendor/distlib/compat.py | 1111 + .../lib/pip/_vendor/distlib/database.py | 1312 ++ .../lib/pip/_vendor/distlib/index.py | 515 + .../lib/pip/_vendor/distlib/locators.py | 1283 + .../lib/pip/_vendor/distlib/manifest.py | 393 + .../lib/pip/_vendor/distlib/markers.py | 190 + .../lib/pip/_vendor/distlib/metadata.py | 1068 + .../lib/pip/_vendor/distlib/resources.py | 355 + .../lib/pip/_vendor/distlib/scripts.py | 384 + .../lib/pip/_vendor/distlib/util.py | 1611 ++ .../lib/pip/_vendor/distlib/version.py | 742 + .../lib/pip/_vendor/distlib/wheel.py | 978 + RBXLegacyDiscordBot/lib/pip/_vendor/distro.py | 1081 + .../lib/pip/_vendor/html5lib/__init__.py | 25 + .../lib/pip/_vendor/html5lib/_ihatexml.py | 288 + .../lib/pip/_vendor/html5lib/_inputstream.py | 923 + .../lib/pip/_vendor/html5lib/_tokenizer.py | 1721 ++ .../pip/_vendor/html5lib/_trie/__init__.py | 14 + .../lib/pip/_vendor/html5lib/_trie/_base.py | 38 + .../lib/pip/_vendor/html5lib/_trie/datrie.py | 44 + .../lib/pip/_vendor/html5lib/_trie/py.py | 67 + .../lib/pip/_vendor/html5lib/_utils.py | 127 + .../lib/pip/_vendor/html5lib/constants.py | 2945 +++ .../pip/_vendor/html5lib/filters/__init__.py | 0 .../filters/alphabeticalattributes.py | 20 + .../lib/pip/_vendor/html5lib/filters/base.py | 12 + .../html5lib/filters/inject_meta_charset.py | 65 + .../lib/pip/_vendor/html5lib/filters/lint.py | 81 + .../_vendor/html5lib/filters/optionaltags.py | 206 + .../pip/_vendor/html5lib/filters/sanitizer.py | 865 + .../_vendor/html5lib/filters/whitespace.py | 38 + .../lib/pip/_vendor/html5lib/html5parser.py | 2733 +++ .../lib/pip/_vendor/html5lib/serializer.py | 334 + .../_vendor/html5lib/treeadapters/__init__.py | 12 + .../_vendor/html5lib/treeadapters/genshi.py | 47 + .../pip/_vendor/html5lib/treeadapters/sax.py | 44 + .../_vendor/html5lib/treebuilders/__init__.py | 76 + .../pip/_vendor/html5lib/treebuilders/base.py | 383 + .../pip/_vendor/html5lib/treebuilders/dom.py | 236 + .../_vendor/html5lib/treebuilders/etree.py | 340 + .../html5lib/treebuilders/etree_lxml.py | 367 + .../_vendor/html5lib/treewalkers/__init__.py | 143 + .../pip/_vendor/html5lib/treewalkers/base.py | 150 + .../pip/_vendor/html5lib/treewalkers/dom.py | 43 + .../pip/_vendor/html5lib/treewalkers/etree.py | 137 + .../html5lib/treewalkers/etree_lxml.py | 213 + .../_vendor/html5lib/treewalkers/genshi.py | 69 + .../lib/pip/_vendor/ipaddress.py | 2425 ++ .../lib/pip/_vendor/lockfile/__init__.py | 347 + .../lib/pip/_vendor/lockfile/linklockfile.py | 73 + .../lib/pip/_vendor/lockfile/mkdirlockfile.py | 84 + .../lib/pip/_vendor/lockfile/pidlockfile.py | 190 + .../pip/_vendor/lockfile/sqlitelockfile.py | 156 + .../pip/_vendor/lockfile/symlinklockfile.py | 70 + .../lib/pip/_vendor/ordereddict.py | 127 + .../lib/pip/_vendor/packaging/__about__.py | 21 + .../lib/pip/_vendor/packaging/__init__.py | 14 + .../lib/pip/_vendor/packaging/_compat.py | 30 + .../lib/pip/_vendor/packaging/_structures.py | 68 + .../lib/pip/_vendor/packaging/markers.py | 303 + .../lib/pip/_vendor/packaging/requirements.py | 129 + .../lib/pip/_vendor/packaging/specifiers.py | 774 + 
.../lib/pip/_vendor/packaging/utils.py | 14 + .../lib/pip/_vendor/packaging/version.py | 393 + .../lib/pip/_vendor/pkg_resources/__init__.py | 3052 +++ .../lib/pip/_vendor/progress/__init__.py | 123 + .../lib/pip/_vendor/progress/bar.py | 83 + .../lib/pip/_vendor/progress/counter.py | 47 + .../lib/pip/_vendor/progress/helpers.py | 91 + .../lib/pip/_vendor/progress/spinner.py | 40 + .../lib/pip/_vendor/pyparsing.py | 5696 +++++ .../lib/pip/_vendor/re-vendor.py | 34 + .../lib/pip/_vendor/requests/__init__.py | 88 + .../lib/pip/_vendor/requests/adapters.py | 503 + .../lib/pip/_vendor/requests/api.py | 148 + .../lib/pip/_vendor/requests/auth.py | 252 + .../lib/pip/_vendor/requests/cacert.pem | 5616 +++++ .../lib/pip/_vendor/requests/certs.py | 25 + .../lib/pip/_vendor/requests/compat.py | 68 + .../lib/pip/_vendor/requests/cookies.py | 540 + .../lib/pip/_vendor/requests/exceptions.py | 114 + .../lib/pip/_vendor/requests/hooks.py | 34 + .../lib/pip/_vendor/requests/models.py | 873 + .../pip/_vendor/requests/packages/__init__.py | 36 + .../requests/packages/chardet/__init__.py | 32 + .../requests/packages/chardet/big5freq.py | 925 + .../requests/packages/chardet/big5prober.py | 42 + .../requests/packages/chardet/chardetect.py | 80 + .../packages/chardet/chardistribution.py | 231 + .../packages/chardet/charsetgroupprober.py | 106 + .../packages/chardet/charsetprober.py | 62 + .../packages/chardet/codingstatemachine.py | 61 + .../requests/packages/chardet/compat.py | 34 + .../requests/packages/chardet/constants.py | 39 + .../requests/packages/chardet/cp949prober.py | 44 + .../requests/packages/chardet/escprober.py | 86 + .../requests/packages/chardet/escsm.py | 242 + .../requests/packages/chardet/eucjpprober.py | 90 + .../requests/packages/chardet/euckrfreq.py | 596 + .../requests/packages/chardet/euckrprober.py | 42 + .../requests/packages/chardet/euctwfreq.py | 428 + .../requests/packages/chardet/euctwprober.py | 41 + .../requests/packages/chardet/gb2312freq.py | 472 + .../requests/packages/chardet/gb2312prober.py | 41 + .../requests/packages/chardet/hebrewprober.py | 283 + .../requests/packages/chardet/jisfreq.py | 569 + .../requests/packages/chardet/jpcntx.py | 227 + .../packages/chardet/langbulgarianmodel.py | 229 + .../packages/chardet/langcyrillicmodel.py | 329 + .../packages/chardet/langgreekmodel.py | 225 + .../packages/chardet/langhebrewmodel.py | 201 + .../packages/chardet/langhungarianmodel.py | 225 + .../packages/chardet/langthaimodel.py | 200 + .../requests/packages/chardet/latin1prober.py | 139 + .../packages/chardet/mbcharsetprober.py | 86 + .../packages/chardet/mbcsgroupprober.py | 54 + .../requests/packages/chardet/mbcssm.py | 572 + .../packages/chardet/sbcharsetprober.py | 120 + .../packages/chardet/sbcsgroupprober.py | 69 + .../requests/packages/chardet/sjisprober.py | 91 + .../packages/chardet/universaldetector.py | 170 + .../requests/packages/chardet/utf8prober.py | 76 + .../requests/packages/urllib3/__init__.py | 96 + .../requests/packages/urllib3/_collections.py | 324 + .../requests/packages/urllib3/connection.py | 330 + .../packages/urllib3/connectionpool.py | 866 + .../packages/urllib3/contrib/__init__.py | 0 .../packages/urllib3/contrib/appengine.py | 231 + .../packages/urllib3/contrib/ntlmpool.py | 115 + .../packages/urllib3/contrib/pyopenssl.py | 358 + .../packages/urllib3/contrib/socks.py | 172 + .../requests/packages/urllib3/exceptions.py | 209 + .../requests/packages/urllib3/fields.py | 178 + .../requests/packages/urllib3/filepost.py | 94 + 
.../packages/urllib3/packages/__init__.py | 5 + .../packages/urllib3/packages/ordered_dict.py | 259 + .../requests/packages/urllib3/packages/six.py | 868 + .../packages/ssl_match_hostname/__init__.py | 13 + .../ssl_match_hostname/_implementation.py | 105 + .../requests/packages/urllib3/poolmanager.py | 367 + .../requests/packages/urllib3/request.py | 151 + .../requests/packages/urllib3/response.py | 530 + .../packages/urllib3/util/__init__.py | 46 + .../packages/urllib3/util/connection.py | 144 + .../requests/packages/urllib3/util/request.py | 72 + .../packages/urllib3/util/response.py | 74 + .../requests/packages/urllib3/util/retry.py | 300 + .../requests/packages/urllib3/util/ssl_.py | 320 + .../requests/packages/urllib3/util/timeout.py | 242 + .../requests/packages/urllib3/util/url.py | 217 + .../lib/pip/_vendor/requests/sessions.py | 712 + .../lib/pip/_vendor/requests/status_codes.py | 91 + .../lib/pip/_vendor/requests/structures.py | 105 + .../lib/pip/_vendor/requests/utils.py | 817 + .../lib/pip/_vendor/retrying.py | 267 + RBXLegacyDiscordBot/lib/pip/_vendor/six.py | 868 + .../lib/pip/_vendor/webencodings/__init__.py | 342 + .../lib/pip/_vendor/webencodings/labels.py | 231 + .../lib/pip/_vendor/webencodings/mklabels.py | 59 + .../lib/pip/_vendor/webencodings/tests.py | 153 + .../_vendor/webencodings/x_user_defined.py | 325 + RBXLegacyDiscordBot/lib/pip/basecommand.py | 337 + RBXLegacyDiscordBot/lib/pip/baseparser.py | 293 + RBXLegacyDiscordBot/lib/pip/cmdoptions.py | 633 + .../lib/pip/commands/__init__.py | 86 + RBXLegacyDiscordBot/lib/pip/commands/check.py | 39 + .../lib/pip/commands/completion.py | 81 + .../lib/pip/commands/download.py | 212 + .../lib/pip/commands/freeze.py | 87 + RBXLegacyDiscordBot/lib/pip/commands/hash.py | 57 + RBXLegacyDiscordBot/lib/pip/commands/help.py | 35 + .../lib/pip/commands/install.py | 437 + RBXLegacyDiscordBot/lib/pip/commands/list.py | 337 + .../lib/pip/commands/search.py | 133 + RBXLegacyDiscordBot/lib/pip/commands/show.py | 154 + .../lib/pip/commands/uninstall.py | 76 + RBXLegacyDiscordBot/lib/pip/commands/wheel.py | 208 + .../lib/pip/compat/__init__.py | 164 + .../lib/pip/compat/dictconfig.py | 565 + RBXLegacyDiscordBot/lib/pip/download.py | 906 + RBXLegacyDiscordBot/lib/pip/exceptions.py | 244 + RBXLegacyDiscordBot/lib/pip/index.py | 1102 + RBXLegacyDiscordBot/lib/pip/locations.py | 182 + .../lib/pip/models/__init__.py | 4 + RBXLegacyDiscordBot/lib/pip/models/index.py | 16 + .../lib/pip/operations/__init__.py | 0 .../lib/pip/operations/check.py | 49 + .../lib/pip/operations/freeze.py | 132 + RBXLegacyDiscordBot/lib/pip/pep425tags.py | 324 + RBXLegacyDiscordBot/lib/pip/req/__init__.py | 10 + RBXLegacyDiscordBot/lib/pip/req/req_file.py | 342 + .../lib/pip/req/req_install.py | 1204 + RBXLegacyDiscordBot/lib/pip/req/req_set.py | 798 + .../lib/pip/req/req_uninstall.py | 195 + RBXLegacyDiscordBot/lib/pip/status_codes.py | 8 + RBXLegacyDiscordBot/lib/pip/utils/__init__.py | 852 + RBXLegacyDiscordBot/lib/pip/utils/appdirs.py | 248 + RBXLegacyDiscordBot/lib/pip/utils/build.py | 42 + .../lib/pip/utils/deprecation.py | 76 + RBXLegacyDiscordBot/lib/pip/utils/encoding.py | 31 + .../lib/pip/utils/filesystem.py | 28 + RBXLegacyDiscordBot/lib/pip/utils/glibc.py | 81 + RBXLegacyDiscordBot/lib/pip/utils/hashes.py | 92 + RBXLegacyDiscordBot/lib/pip/utils/logging.py | 130 + RBXLegacyDiscordBot/lib/pip/utils/outdated.py | 162 + .../lib/pip/utils/packaging.py | 63 + .../lib/pip/utils/setuptools_build.py | 8 + RBXLegacyDiscordBot/lib/pip/utils/ui.py | 344 + 
RBXLegacyDiscordBot/lib/pip/vcs/__init__.py | 366 + RBXLegacyDiscordBot/lib/pip/vcs/bazaar.py | 116 + RBXLegacyDiscordBot/lib/pip/vcs/git.py | 300 + RBXLegacyDiscordBot/lib/pip/vcs/mercurial.py | 103 + RBXLegacyDiscordBot/lib/pip/vcs/subversion.py | 269 + RBXLegacyDiscordBot/lib/pip/wheel.py | 853 + .../pycparser-2.18-py3.6.egg-info/PKG-INFO | 17 + .../pycparser-2.18-py3.6.egg-info/SOURCES.txt | 153 + .../dependency_links.txt | 1 + .../installed-files.txt | 39 + .../top_level.txt | 1 + RBXLegacyDiscordBot/lib/pycparser/__init__.py | 93 + RBXLegacyDiscordBot/lib/pycparser/_ast_gen.py | 278 + .../lib/pycparser/_build_tables.py | 33 + RBXLegacyDiscordBot/lib/pycparser/_c_ast.cfg | 191 + .../lib/pycparser/ast_transforms.py | 105 + RBXLegacyDiscordBot/lib/pycparser/c_ast.py | 809 + .../lib/pycparser/c_generator.py | 411 + RBXLegacyDiscordBot/lib/pycparser/c_lexer.py | 485 + RBXLegacyDiscordBot/lib/pycparser/c_parser.py | 1782 ++ RBXLegacyDiscordBot/lib/pycparser/lextab.py | 10 + .../lib/pycparser/ply/__init__.py | 5 + RBXLegacyDiscordBot/lib/pycparser/ply/cpp.py | 907 + .../lib/pycparser/ply/ctokens.py | 133 + RBXLegacyDiscordBot/lib/pycparser/ply/lex.py | 1099 + RBXLegacyDiscordBot/lib/pycparser/ply/yacc.py | 3494 +++ RBXLegacyDiscordBot/lib/pycparser/ply/ygen.py | 74 + .../lib/pycparser/plyparser.py | 116 + RBXLegacyDiscordBot/lib/pycparser/yacctab.py | 332 + .../requests-2.18.1.dist-info/DESCRIPTION.rst | 1610 ++ .../lib/requests-2.18.1.dist-info/INSTALLER | 1 + .../lib/requests-2.18.1.dist-info/METADATA | 1646 ++ .../lib/requests-2.18.1.dist-info/RECORD | 43 + .../lib/requests-2.18.1.dist-info/WHEEL | 6 + .../requests-2.18.1.dist-info/top_level.txt | 1 + RBXLegacyDiscordBot/lib/requests/__init__.py | 121 + .../lib/requests/__version__.py | 14 + .../lib/requests/_internal_utils.py | 42 + RBXLegacyDiscordBot/lib/requests/adapters.py | 520 + RBXLegacyDiscordBot/lib/requests/api.py | 152 + RBXLegacyDiscordBot/lib/requests/auth.py | 293 + RBXLegacyDiscordBot/lib/requests/certs.py | 18 + RBXLegacyDiscordBot/lib/requests/compat.py | 71 + RBXLegacyDiscordBot/lib/requests/cookies.py | 542 + .../lib/requests/exceptions.py | 122 + RBXLegacyDiscordBot/lib/requests/help.py | 111 + RBXLegacyDiscordBot/lib/requests/hooks.py | 34 + RBXLegacyDiscordBot/lib/requests/models.py | 950 + RBXLegacyDiscordBot/lib/requests/packages.py | 14 + RBXLegacyDiscordBot/lib/requests/sessions.py | 731 + .../lib/requests/status_codes.py | 91 + .../lib/requests/structures.py | 105 + RBXLegacyDiscordBot/lib/requests/utils.py | 904 + .../lib/six-1.10.0.dist-info/DESCRIPTION.rst | 18 + .../lib/six-1.10.0.dist-info/INSTALLER | 1 + .../lib/six-1.10.0.dist-info/METADATA | 34 + .../lib/six-1.10.0.dist-info/RECORD | 9 + .../lib/six-1.10.0.dist-info/WHEEL | 6 + .../lib/six-1.10.0.dist-info/top_level.txt | 1 + RBXLegacyDiscordBot/lib/six.py | 868 + .../urllib3-1.21.1.dist-info/DESCRIPTION.rst | 978 + .../lib/urllib3-1.21.1.dist-info/INSTALLER | 1 + .../lib/urllib3-1.21.1.dist-info/METADATA | 1006 + .../lib/urllib3-1.21.1.dist-info/RECORD | 79 + .../lib/urllib3-1.21.1.dist-info/WHEEL | 6 + .../urllib3-1.21.1.dist-info/top_level.txt | 1 + RBXLegacyDiscordBot/lib/urllib3/__init__.py | 97 + .../lib/urllib3/_collections.py | 314 + RBXLegacyDiscordBot/lib/urllib3/connection.py | 373 + .../lib/urllib3/connectionpool.py | 899 + .../lib/urllib3/contrib/__init__.py | 0 .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 590 + .../contrib/_securetransport/low_level.py | 343 + .../lib/urllib3/contrib/appengine.py 
| 296 + .../lib/urllib3/contrib/ntlmpool.py | 112 + .../lib/urllib3/contrib/pyopenssl.py | 452 + .../lib/urllib3/contrib/securetransport.py | 807 + .../lib/urllib3/contrib/socks.py | 188 + RBXLegacyDiscordBot/lib/urllib3/exceptions.py | 246 + RBXLegacyDiscordBot/lib/urllib3/fields.py | 178 + RBXLegacyDiscordBot/lib/urllib3/filepost.py | 94 + .../lib/urllib3/packages/__init__.py | 5 + .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 53 + .../lib/urllib3/packages/ordered_dict.py | 259 + .../lib/urllib3/packages/six.py | 868 + .../packages/ssl_match_hostname/__init__.py | 19 + .../ssl_match_hostname/_implementation.py | 157 + .../lib/urllib3/poolmanager.py | 440 + RBXLegacyDiscordBot/lib/urllib3/request.py | 148 + RBXLegacyDiscordBot/lib/urllib3/response.py | 622 + .../lib/urllib3/util/__init__.py | 54 + .../lib/urllib3/util/connection.py | 130 + .../lib/urllib3/util/request.py | 118 + .../lib/urllib3/util/response.py | 81 + RBXLegacyDiscordBot/lib/urllib3/util/retry.py | 401 + .../lib/urllib3/util/selectors.py | 581 + RBXLegacyDiscordBot/lib/urllib3/util/ssl_.py | 337 + .../lib/urllib3/util/timeout.py | 242 + RBXLegacyDiscordBot/lib/urllib3/util/url.py | 230 + RBXLegacyDiscordBot/lib/urllib3/util/wait.py | 40 + .../websockets-3.3.dist-info/DESCRIPTION.rst | 30 + .../lib/websockets-3.3.dist-info/INSTALLER | 1 + .../lib/websockets-3.3.dist-info/METADATA | 51 + .../lib/websockets-3.3.dist-info/RECORD | 47 + .../lib/websockets-3.3.dist-info/WHEEL | 8 + .../websockets-3.3.dist-info/top_level.txt | 2 + .../lib/websockets/__init__.py | 17 + RBXLegacyDiscordBot/lib/websockets/client.py | 181 + .../lib/websockets/compatibility.py | 8 + .../lib/websockets/exceptions.py | 63 + RBXLegacyDiscordBot/lib/websockets/framing.py | 204 + .../lib/websockets/handshake.py | 138 + RBXLegacyDiscordBot/lib/websockets/http.py | 108 + .../lib/websockets/protocol.py | 667 + .../lib/websockets/py35/__init__.py | 2 + .../lib/websockets/py35/client.py | 21 + .../lib/websockets/py35/client_server.py | 33 + RBXLegacyDiscordBot/lib/websockets/server.py | 369 + .../lib/websockets/test_client_server.py | 459 + .../lib/websockets/test_framing.py | 146 + .../lib/websockets/test_handshake.py | 117 + .../lib/websockets/test_http.py | 77 + .../lib/websockets/test_protocol.py | 788 + .../lib/websockets/test_uri.py | 33 + RBXLegacyDiscordBot/lib/websockets/uri.py | 55 + RBXLegacyDiscordBot/lib/websockets/version.py | 1 + .../DESCRIPTION.rst | 3 + .../youtube_dl-2017.7.23.dist-info/INSTALLER | 1 + .../youtube_dl-2017.7.23.dist-info/METADATA | 25 + .../lib/youtube_dl-2017.7.23.dist-info/RECORD | 1535 ++ .../lib/youtube_dl-2017.7.23.dist-info/WHEEL | 6 + .../entry_points.txt | 3 + .../top_level.txt | 1 + .../lib/youtube_dl/YoutubeDL.py | 2338 ++ .../lib/youtube_dl/__init__.py | 474 + .../lib/youtube_dl/__main__.py | 19 + RBXLegacyDiscordBot/lib/youtube_dl/aes.py | 361 + RBXLegacyDiscordBot/lib/youtube_dl/cache.py | 96 + RBXLegacyDiscordBot/lib/youtube_dl/compat.py | 2952 +++ .../lib/youtube_dl/downloader/__init__.py | 61 + .../lib/youtube_dl/downloader/common.py | 386 + .../lib/youtube_dl/downloader/dash.py | 63 + .../lib/youtube_dl/downloader/external.py | 347 + .../lib/youtube_dl/downloader/f4m.py | 429 + .../lib/youtube_dl/downloader/fragment.py | 241 + .../lib/youtube_dl/downloader/hls.py | 181 + .../lib/youtube_dl/downloader/http.py | 253 + .../lib/youtube_dl/downloader/ism.py | 257 + .../lib/youtube_dl/downloader/rtmp.py | 203 + .../lib/youtube_dl/downloader/rtsp.py | 47 + 
.../lib/youtube_dl/extractor/__init__.py | 46 + .../lib/youtube_dl/extractor/abc.py | 167 + .../lib/youtube_dl/extractor/abcnews.py | 146 + .../lib/youtube_dl/extractor/abcotvs.py | 112 + .../lib/youtube_dl/extractor/academicearth.py | 41 + .../lib/youtube_dl/extractor/acast.py | 98 + .../lib/youtube_dl/extractor/addanime.py | 95 + .../lib/youtube_dl/extractor/adn.py | 150 + .../lib/youtube_dl/extractor/adobepass.py | 1567 ++ .../lib/youtube_dl/extractor/adobetv.py | 197 + .../lib/youtube_dl/extractor/adultswim.py | 159 + .../lib/youtube_dl/extractor/aenetworks.py | 236 + .../lib/youtube_dl/extractor/afreecatv.py | 352 + .../lib/youtube_dl/extractor/airmozilla.py | 66 + .../lib/youtube_dl/extractor/aljazeera.py | 33 + .../lib/youtube_dl/extractor/allocine.py | 132 + .../lib/youtube_dl/extractor/alphaporno.py | 77 + .../lib/youtube_dl/extractor/amcnetworks.py | 113 + .../lib/youtube_dl/extractor/amp.py | 101 + .../lib/youtube_dl/extractor/animeondemand.py | 270 + .../lib/youtube_dl/extractor/anitube.py | 30 + .../lib/youtube_dl/extractor/anvato.py | 278 + .../lib/youtube_dl/extractor/anysex.py | 61 + .../lib/youtube_dl/extractor/aol.py | 110 + .../lib/youtube_dl/extractor/aparat.py | 58 + .../lib/youtube_dl/extractor/appleconnect.py | 50 + .../lib/youtube_dl/extractor/appletrailers.py | 283 + .../lib/youtube_dl/extractor/archiveorg.py | 65 + .../lib/youtube_dl/extractor/ard.py | 299 + .../lib/youtube_dl/extractor/arkena.py | 133 + .../lib/youtube_dl/extractor/arte.py | 460 + .../lib/youtube_dl/extractor/asiancrush.py | 93 + .../lib/youtube_dl/extractor/atresplayer.py | 202 + .../youtube_dl/extractor/atttechchannel.py | 55 + .../lib/youtube_dl/extractor/atvat.py | 73 + .../lib/youtube_dl/extractor/audimedia.py | 89 + .../lib/youtube_dl/extractor/audioboom.py | 69 + .../lib/youtube_dl/extractor/audiomack.py | 145 + .../lib/youtube_dl/extractor/awaan.py | 185 + .../lib/youtube_dl/extractor/azmedien.py | 213 + .../lib/youtube_dl/extractor/baidu.py | 56 + .../lib/youtube_dl/extractor/bambuser.py | 142 + .../lib/youtube_dl/extractor/bandcamp.py | 349 + .../lib/youtube_dl/extractor/bbc.py | 1231 + .../lib/youtube_dl/extractor/beampro.py | 188 + .../lib/youtube_dl/extractor/beatport.py | 103 + .../lib/youtube_dl/extractor/beeg.py | 130 + .../lib/youtube_dl/extractor/behindkink.py | 46 + .../lib/youtube_dl/extractor/bellmedia.py | 83 + .../lib/youtube_dl/extractor/bet.py | 80 + .../lib/youtube_dl/extractor/bigflix.py | 76 + .../lib/youtube_dl/extractor/bild.py | 40 + .../lib/youtube_dl/extractor/bilibili.py | 261 + .../lib/youtube_dl/extractor/biobiochiletv.py | 81 + .../lib/youtube_dl/extractor/biqle.py | 40 + .../youtube_dl/extractor/bleacherreport.py | 106 + .../lib/youtube_dl/extractor/blinkx.py | 86 + .../lib/youtube_dl/extractor/bloomberg.py | 83 + .../lib/youtube_dl/extractor/bokecc.py | 60 + .../lib/youtube_dl/extractor/bostonglobe.py | 72 + .../lib/youtube_dl/extractor/bpb.py | 57 + .../lib/youtube_dl/extractor/br.py | 171 + .../lib/youtube_dl/extractor/bravotv.py | 74 + .../lib/youtube_dl/extractor/breakcom.py | 143 + .../lib/youtube_dl/extractor/brightcove.py | 716 + .../lib/youtube_dl/extractor/buzzfeed.py | 98 + .../lib/youtube_dl/extractor/byutv.py | 93 + .../lib/youtube_dl/extractor/c56.py | 65 + .../lib/youtube_dl/extractor/camdemy.py | 161 + .../lib/youtube_dl/extractor/camwithher.py | 87 + .../lib/youtube_dl/extractor/canalc2.py | 65 + .../lib/youtube_dl/extractor/canalplus.py | 191 + .../lib/youtube_dl/extractor/canvas.py | 120 + .../lib/youtube_dl/extractor/carambatv.py | 102 + 
.../youtube_dl/extractor/cartoonnetwork.py | 42 + .../lib/youtube_dl/extractor/cbc.py | 355 + .../lib/youtube_dl/extractor/cbs.py | 100 + .../youtube_dl/extractor/cbsinteractive.py | 103 + .../lib/youtube_dl/extractor/cbslocal.py | 106 + .../lib/youtube_dl/extractor/cbsnews.py | 133 + .../lib/youtube_dl/extractor/cbssports.py | 31 + .../lib/youtube_dl/extractor/ccc.py | 77 + .../lib/youtube_dl/extractor/ccma.py | 99 + .../lib/youtube_dl/extractor/cctv.py | 191 + .../lib/youtube_dl/extractor/cda.py | 182 + .../lib/youtube_dl/extractor/ceskatelevize.py | 279 + .../lib/youtube_dl/extractor/channel9.py | 256 + .../lib/youtube_dl/extractor/charlierose.py | 51 + .../lib/youtube_dl/extractor/chaturbate.py | 76 + .../lib/youtube_dl/extractor/chilloutzone.py | 97 + .../lib/youtube_dl/extractor/chirbit.py | 92 + .../lib/youtube_dl/extractor/cinchcast.py | 50 + .../lib/youtube_dl/extractor/cjsw.py | 72 + .../lib/youtube_dl/extractor/clipfish.py | 67 + .../lib/youtube_dl/extractor/cliphunter.py | 88 + .../lib/youtube_dl/extractor/cliprs.py | 33 + .../lib/youtube_dl/extractor/clipsyndicate.py | 54 + .../lib/youtube_dl/extractor/closertotruth.py | 92 + .../lib/youtube_dl/extractor/cloudy.py | 56 + .../lib/youtube_dl/extractor/clubic.py | 56 + .../lib/youtube_dl/extractor/clyp.py | 57 + .../lib/youtube_dl/extractor/cmt.py | 54 + .../lib/youtube_dl/extractor/cnbc.py | 36 + .../lib/youtube_dl/extractor/cnn.py | 152 + .../lib/youtube_dl/extractor/collegerama.py | 93 + .../lib/youtube_dl/extractor/comcarcoff.py | 74 + .../lib/youtube_dl/extractor/comedycentral.py | 138 + .../lib/youtube_dl/extractor/common.py | 2566 ++ .../youtube_dl/extractor/commonmistakes.py | 50 + .../youtube_dl/extractor/commonprotocols.py | 60 + .../lib/youtube_dl/extractor/condenast.py | 222 + .../lib/youtube_dl/extractor/corus.py | 95 + .../lib/youtube_dl/extractor/coub.py | 140 + .../lib/youtube_dl/extractor/cracked.py | 91 + .../lib/youtube_dl/extractor/crackle.py | 141 + .../lib/youtube_dl/extractor/criterion.py | 39 + .../youtube_dl/extractor/crooksandliars.py | 60 + .../lib/youtube_dl/extractor/crunchyroll.py | 590 + .../lib/youtube_dl/extractor/cspan.py | 183 + .../lib/youtube_dl/extractor/ctsnews.py | 87 + .../lib/youtube_dl/extractor/ctvnews.py | 68 + .../youtube_dl/extractor/cultureunplugged.py | 70 + .../youtube_dl/extractor/curiositystream.py | 159 + .../lib/youtube_dl/extractor/cwtv.py | 117 + .../lib/youtube_dl/extractor/dailymail.py | 81 + .../lib/youtube_dl/extractor/dailymotion.py | 464 + .../lib/youtube_dl/extractor/daisuki.py | 159 + .../lib/youtube_dl/extractor/daum.py | 308 + .../lib/youtube_dl/extractor/dbtv.py | 56 + .../lib/youtube_dl/extractor/dctp.py | 54 + .../lib/youtube_dl/extractor/deezer.py | 91 + .../lib/youtube_dl/extractor/defense.py | 39 + .../lib/youtube_dl/extractor/democracynow.py | 96 + .../lib/youtube_dl/extractor/dfb.py | 57 + .../lib/youtube_dl/extractor/dhm.py | 59 + .../lib/youtube_dl/extractor/digiteka.py | 112 + .../lib/youtube_dl/extractor/discovery.py | 97 + .../lib/youtube_dl/extractor/discoverygo.py | 167 + .../youtube_dl/extractor/discoverynetworks.py | 52 + .../lib/youtube_dl/extractor/discoveryvr.py | 59 + .../lib/youtube_dl/extractor/disney.py | 162 + .../lib/youtube_dl/extractor/dispeak.py | 118 + .../lib/youtube_dl/extractor/dotsub.py | 83 + .../lib/youtube_dl/extractor/douyutv.py | 201 + .../lib/youtube_dl/extractor/dplay.py | 244 + .../lib/youtube_dl/extractor/dramafever.py | 209 + .../lib/youtube_dl/extractor/drbonanza.py | 59 + .../lib/youtube_dl/extractor/dreisat.py | 204 + 
.../lib/youtube_dl/extractor/dropbox.py | 40 + .../lib/youtube_dl/extractor/drtuber.py | 101 + .../lib/youtube_dl/extractor/drtv.py | 228 + .../lib/youtube_dl/extractor/dumpert.py | 69 + .../lib/youtube_dl/extractor/dvtv.py | 168 + .../lib/youtube_dl/extractor/dw.py | 108 + .../lib/youtube_dl/extractor/eagleplatform.py | 208 + .../lib/youtube_dl/extractor/ebaumsworld.py | 33 + .../lib/youtube_dl/extractor/echomsk.py | 46 + .../lib/youtube_dl/extractor/egghead.py | 84 + .../lib/youtube_dl/extractor/ehow.py | 38 + .../lib/youtube_dl/extractor/eighttracks.py | 164 + .../lib/youtube_dl/extractor/einthusan.py | 102 + .../lib/youtube_dl/extractor/eitb.py | 88 + .../lib/youtube_dl/extractor/ellentv.py | 101 + .../lib/youtube_dl/extractor/elpais.py | 95 + .../lib/youtube_dl/extractor/embedly.py | 16 + .../lib/youtube_dl/extractor/engadget.py | 27 + .../lib/youtube_dl/extractor/eporner.py | 116 + .../lib/youtube_dl/extractor/eroprofile.py | 95 + .../lib/youtube_dl/extractor/escapist.py | 106 + .../lib/youtube_dl/extractor/espn.py | 177 + .../lib/youtube_dl/extractor/esri.py | 74 + .../lib/youtube_dl/extractor/etonline.py | 39 + .../lib/youtube_dl/extractor/europa.py | 93 + .../youtube_dl/extractor/everyonesmixtape.py | 77 + .../lib/youtube_dl/extractor/expotv.py | 77 + .../lib/youtube_dl/extractor/extractors.py | 1332 ++ .../lib/youtube_dl/extractor/extremetube.py | 50 + .../lib/youtube_dl/extractor/eyedotv.py | 64 + .../lib/youtube_dl/extractor/facebook.py | 442 + .../lib/youtube_dl/extractor/faz.py | 73 + .../lib/youtube_dl/extractor/fc2.py | 160 + .../lib/youtube_dl/extractor/fczenit.py | 48 + .../lib/youtube_dl/extractor/filmon.py | 178 + .../lib/youtube_dl/extractor/firstpost.py | 50 + .../lib/youtube_dl/extractor/firsttv.py | 155 + .../lib/youtube_dl/extractor/fivemin.py | 54 + .../lib/youtube_dl/extractor/fivetv.py | 89 + .../lib/youtube_dl/extractor/fktv.py | 51 + .../lib/youtube_dl/extractor/flickr.py | 116 + .../lib/youtube_dl/extractor/flipagram.py | 115 + .../lib/youtube_dl/extractor/folketinget.py | 77 + .../lib/youtube_dl/extractor/footyroom.py | 56 + .../lib/youtube_dl/extractor/formula1.py | 33 + .../lib/youtube_dl/extractor/fourtube.py | 117 + .../lib/youtube_dl/extractor/fox.py | 58 + .../lib/youtube_dl/extractor/fox9.py | 43 + .../lib/youtube_dl/extractor/foxgay.py | 63 + .../lib/youtube_dl/extractor/foxnews.py | 140 + .../lib/youtube_dl/extractor/foxsports.py | 43 + .../lib/youtube_dl/extractor/franceculture.py | 63 + .../lib/youtube_dl/extractor/franceinter.py | 56 + .../lib/youtube_dl/extractor/francetv.py | 368 + .../lib/youtube_dl/extractor/freesound.py | 79 + .../lib/youtube_dl/extractor/freespeech.py | 37 + .../lib/youtube_dl/extractor/freshlive.py | 83 + .../lib/youtube_dl/extractor/funimation.py | 149 + .../lib/youtube_dl/extractor/funnyordie.py | 162 + .../lib/youtube_dl/extractor/fusion.py | 35 + .../lib/youtube_dl/extractor/fxnetworks.py | 70 + .../lib/youtube_dl/extractor/gameinformer.py | 28 + .../lib/youtube_dl/extractor/gameone.py | 134 + .../lib/youtube_dl/extractor/gamersyde.py | 70 + .../lib/youtube_dl/extractor/gamespot.py | 129 + .../lib/youtube_dl/extractor/gamestar.py | 55 + .../lib/youtube_dl/extractor/gaskrank.py | 101 + .../lib/youtube_dl/extractor/gazeta.py | 48 + .../lib/youtube_dl/extractor/gdcvault.py | 174 + .../lib/youtube_dl/extractor/generic.py | 3000 +++ .../lib/youtube_dl/extractor/gfycat.py | 110 + .../lib/youtube_dl/extractor/giantbomb.py | 87 + .../lib/youtube_dl/extractor/giga.py | 102 + .../lib/youtube_dl/extractor/glide.py | 43 + 
.../lib/youtube_dl/extractor/globo.py | 456 + .../lib/youtube_dl/extractor/go.py | 200 + .../lib/youtube_dl/extractor/go90.py | 126 + .../lib/youtube_dl/extractor/godtube.py | 58 + .../lib/youtube_dl/extractor/golem.py | 72 + .../lib/youtube_dl/extractor/googledrive.py | 106 + .../lib/youtube_dl/extractor/googleplus.py | 73 + .../lib/youtube_dl/extractor/googlesearch.py | 59 + .../lib/youtube_dl/extractor/goshgay.py | 51 + .../lib/youtube_dl/extractor/gputechconf.py | 35 + .../lib/youtube_dl/extractor/groupon.py | 67 + .../lib/youtube_dl/extractor/hark.py | 33 + .../lib/youtube_dl/extractor/hbo.py | 195 + .../lib/youtube_dl/extractor/hearthisat.py | 135 + .../lib/youtube_dl/extractor/heise.py | 90 + .../lib/youtube_dl/extractor/hellporno.py | 75 + .../lib/youtube_dl/extractor/helsinki.py | 43 + .../lib/youtube_dl/extractor/hentaistigma.py | 39 + .../lib/youtube_dl/extractor/hgtv.py | 40 + .../lib/youtube_dl/extractor/historicfilms.py | 47 + .../lib/youtube_dl/extractor/hitbox.py | 214 + .../lib/youtube_dl/extractor/hitrecord.py | 68 + .../lib/youtube_dl/extractor/hornbunny.py | 49 + .../lib/youtube_dl/extractor/hotnewhiphop.py | 67 + .../lib/youtube_dl/extractor/hotstar.py | 101 + .../lib/youtube_dl/extractor/howcast.py | 43 + .../lib/youtube_dl/extractor/howstuffworks.py | 115 + .../lib/youtube_dl/extractor/hrti.py | 202 + .../lib/youtube_dl/extractor/huajiao.py | 56 + .../lib/youtube_dl/extractor/huffpost.py | 96 + .../lib/youtube_dl/extractor/hypem.py | 61 + .../lib/youtube_dl/extractor/iconosquare.py | 85 + .../lib/youtube_dl/extractor/ign.py | 232 + .../lib/youtube_dl/extractor/imdb.py | 123 + .../lib/youtube_dl/extractor/imgur.py | 150 + .../lib/youtube_dl/extractor/ina.py | 36 + .../lib/youtube_dl/extractor/inc.py | 41 + .../lib/youtube_dl/extractor/indavideo.py | 143 + .../lib/youtube_dl/extractor/infoq.py | 135 + .../lib/youtube_dl/extractor/instagram.py | 302 + .../extractor/internetvideoarchive.py | 100 + .../lib/youtube_dl/extractor/iprima.py | 101 + .../lib/youtube_dl/extractor/iqiyi.py | 394 + .../lib/youtube_dl/extractor/ir90tv.py | 42 + .../lib/youtube_dl/extractor/itv.py | 223 + .../lib/youtube_dl/extractor/ivi.py | 216 + .../lib/youtube_dl/extractor/ivideon.py | 83 + .../lib/youtube_dl/extractor/iwara.py | 95 + .../lib/youtube_dl/extractor/izlesene.py | 121 + .../lib/youtube_dl/extractor/jamendo.py | 138 + .../lib/youtube_dl/extractor/jeuxvideo.py | 56 + .../lib/youtube_dl/extractor/joj.py | 100 + .../lib/youtube_dl/extractor/jove.py | 80 + .../lib/youtube_dl/extractor/jpopsukitv.py | 68 + .../lib/youtube_dl/extractor/jwplatform.py | 35 + .../lib/youtube_dl/extractor/kaltura.py | 348 + .../lib/youtube_dl/extractor/kamcord.py | 71 + .../lib/youtube_dl/extractor/kanalplay.py | 97 + .../lib/youtube_dl/extractor/kankan.py | 48 + .../lib/youtube_dl/extractor/karaoketv.py | 64 + .../youtube_dl/extractor/karrierevideos.py | 99 + .../lib/youtube_dl/extractor/keek.py | 39 + .../lib/youtube_dl/extractor/keezmovies.py | 128 + .../lib/youtube_dl/extractor/ketnet.py | 72 + .../lib/youtube_dl/extractor/khanacademy.py | 82 + .../lib/youtube_dl/extractor/kickstarter.py | 71 + .../youtube_dl/extractor/konserthusetplay.py | 123 + .../lib/youtube_dl/extractor/kontrtube.py | 73 + .../lib/youtube_dl/extractor/krasview.py | 60 + .../lib/youtube_dl/extractor/ku6.py | 32 + .../lib/youtube_dl/extractor/kusi.py | 88 + .../lib/youtube_dl/extractor/kuwo.py | 352 + .../lib/youtube_dl/extractor/la7.py | 65 + .../lib/youtube_dl/extractor/laola1tv.py | 217 + .../lib/youtube_dl/extractor/lci.py | 24 + 
.../lib/youtube_dl/extractor/lcp.py | 90 + .../lib/youtube_dl/extractor/learnr.py | 33 + .../lib/youtube_dl/extractor/lecture2go.py | 71 + .../lib/youtube_dl/extractor/leeco.py | 368 + .../lib/youtube_dl/extractor/lego.py | 128 + .../lib/youtube_dl/extractor/lemonde.py | 58 + .../youtube_dl/extractor/libraryofcongress.py | 143 + .../lib/youtube_dl/extractor/libsyn.py | 69 + .../lib/youtube_dl/extractor/lifenews.py | 239 + .../lib/youtube_dl/extractor/limelight.py | 367 + .../lib/youtube_dl/extractor/litv.py | 148 + .../lib/youtube_dl/extractor/liveleak.py | 133 + .../lib/youtube_dl/extractor/livestream.py | 369 + .../lib/youtube_dl/extractor/lnkgo.py | 113 + .../lib/youtube_dl/extractor/localnews8.py | 47 + .../lib/youtube_dl/extractor/lovehomeporn.py | 37 + .../lib/youtube_dl/extractor/lrt.py | 94 + .../lib/youtube_dl/extractor/lynda.py | 316 + .../lib/youtube_dl/extractor/m6.py | 25 + .../lib/youtube_dl/extractor/macgamestore.py | 42 + .../lib/youtube_dl/extractor/mailru.py | 138 + .../lib/youtube_dl/extractor/makerschannel.py | 40 + .../lib/youtube_dl/extractor/makertv.py | 32 + .../lib/youtube_dl/extractor/mangomolo.py | 54 + .../lib/youtube_dl/extractor/matchtv.py | 55 + .../lib/youtube_dl/extractor/mdr.py | 184 + .../lib/youtube_dl/extractor/medialaan.py | 277 + .../lib/youtube_dl/extractor/mediaset.py | 118 + .../lib/youtube_dl/extractor/medici.py | 70 + .../lib/youtube_dl/extractor/meipai.py | 104 + .../lib/youtube_dl/extractor/melonvod.py | 72 + .../lib/youtube_dl/extractor/meta.py | 73 + .../lib/youtube_dl/extractor/metacafe.py | 286 + .../lib/youtube_dl/extractor/metacritic.py | 65 + .../lib/youtube_dl/extractor/mgoon.py | 87 + .../lib/youtube_dl/extractor/mgtv.py | 68 + .../lib/youtube_dl/extractor/miaopai.py | 40 + .../extractor/microsoftvirtualacademy.py | 195 + .../lib/youtube_dl/extractor/minhateca.py | 70 + .../lib/youtube_dl/extractor/ministrygrid.py | 57 + .../lib/youtube_dl/extractor/minoto.py | 56 + .../lib/youtube_dl/extractor/miomio.py | 141 + .../lib/youtube_dl/extractor/mit.py | 156 + .../lib/youtube_dl/extractor/mitele.py | 207 + .../lib/youtube_dl/extractor/mixcloud.py | 306 + .../lib/youtube_dl/extractor/mlb.py | 173 + .../lib/youtube_dl/extractor/mnet.py | 81 + .../lib/youtube_dl/extractor/moevideo.py | 114 + .../lib/youtube_dl/extractor/mofosex.py | 56 + .../lib/youtube_dl/extractor/mojvideo.py | 58 + .../lib/youtube_dl/extractor/moniker.py | 116 + .../lib/youtube_dl/extractor/morningstar.py | 47 + .../lib/youtube_dl/extractor/motherless.py | 116 + .../lib/youtube_dl/extractor/motorsport.py | 49 + .../lib/youtube_dl/extractor/movieclips.py | 49 + .../lib/youtube_dl/extractor/moviezine.py | 45 + .../lib/youtube_dl/extractor/movingimage.py | 52 + .../lib/youtube_dl/extractor/mpora.py | 62 + .../lib/youtube_dl/extractor/msn.py | 115 + .../lib/youtube_dl/extractor/mtv.py | 484 + .../lib/youtube_dl/extractor/muenchentv.py | 75 + .../lib/youtube_dl/extractor/musicplayon.py | 66 + .../lib/youtube_dl/extractor/mwave.py | 90 + .../lib/youtube_dl/extractor/myspace.py | 212 + .../lib/youtube_dl/extractor/myspass.py | 73 + .../lib/youtube_dl/extractor/myvi.py | 60 + .../lib/youtube_dl/extractor/myvideo.py | 177 + .../lib/youtube_dl/extractor/myvidster.py | 29 + .../extractor/nationalgeographic.py | 183 + .../lib/youtube_dl/extractor/naver.py | 125 + .../lib/youtube_dl/extractor/nba.py | 154 + .../lib/youtube_dl/extractor/nbc.py | 391 + .../lib/youtube_dl/extractor/ndr.py | 387 + .../lib/youtube_dl/extractor/ndtv.py | 54 + .../lib/youtube_dl/extractor/nerdcubed.py | 36 + 
.../lib/youtube_dl/extractor/neteasemusic.py | 485 + .../lib/youtube_dl/extractor/netzkino.py | 89 + .../lib/youtube_dl/extractor/newgrounds.py | 166 + .../lib/youtube_dl/extractor/newstube.py | 117 + .../lib/youtube_dl/extractor/nextmedia.py | 238 + .../lib/youtube_dl/extractor/nexx.py | 271 + .../lib/youtube_dl/extractor/nfb.py | 112 + .../lib/youtube_dl/extractor/nfl.py | 231 + .../lib/youtube_dl/extractor/nhk.py | 51 + .../lib/youtube_dl/extractor/nhl.py | 351 + .../lib/youtube_dl/extractor/nick.py | 126 + .../lib/youtube_dl/extractor/niconico.py | 275 + .../youtube_dl/extractor/ninecninemedia.py | 122 + .../lib/youtube_dl/extractor/ninegag.py | 104 + .../lib/youtube_dl/extractor/ninenow.py | 85 + .../lib/youtube_dl/extractor/nintendo.py | 46 + .../lib/youtube_dl/extractor/njpwworld.py | 91 + .../lib/youtube_dl/extractor/nobelprize.py | 62 + .../lib/youtube_dl/extractor/noco.py | 235 + .../lib/youtube_dl/extractor/nonktube.py | 33 + .../lib/youtube_dl/extractor/noovo.py | 97 + .../lib/youtube_dl/extractor/normalboots.py | 54 + .../lib/youtube_dl/extractor/nosvideo.py | 75 + .../lib/youtube_dl/extractor/nova.py | 179 + .../lib/youtube_dl/extractor/novamov.py | 212 + .../lib/youtube_dl/extractor/nowness.py | 147 + .../lib/youtube_dl/extractor/nowtv.py | 261 + .../lib/youtube_dl/extractor/noz.py | 89 + .../lib/youtube_dl/extractor/npo.py | 598 + .../lib/youtube_dl/extractor/npr.py | 82 + .../lib/youtube_dl/extractor/nrk.py | 522 + .../lib/youtube_dl/extractor/ntvde.py | 77 + .../lib/youtube_dl/extractor/ntvru.py | 132 + .../lib/youtube_dl/extractor/nuevo.py | 39 + .../lib/youtube_dl/extractor/nuvid.py | 71 + .../lib/youtube_dl/extractor/nytimes.py | 223 + .../lib/youtube_dl/extractor/nzz.py | 36 + .../lib/youtube_dl/extractor/odatv.py | 50 + .../lib/youtube_dl/extractor/odnoklassniki.py | 210 + .../lib/youtube_dl/extractor/oktoberfesttv.py | 47 + .../lib/youtube_dl/extractor/once.py | 42 + .../lib/youtube_dl/extractor/ondemandkorea.py | 62 + .../lib/youtube_dl/extractor/onet.py | 250 + .../lib/youtube_dl/extractor/onionstudios.py | 78 + .../lib/youtube_dl/extractor/ooyala.py | 204 + .../lib/youtube_dl/extractor/openload.py | 128 + .../lib/youtube_dl/extractor/ora.py | 75 + .../lib/youtube_dl/extractor/orf.py | 309 + .../lib/youtube_dl/extractor/packtpub.py | 164 + .../lib/youtube_dl/extractor/pandatv.py | 94 + .../lib/youtube_dl/extractor/pandoratv.py | 114 + .../youtube_dl/extractor/parliamentliveuk.py | 43 + .../lib/youtube_dl/extractor/patreon.py | 115 + .../lib/youtube_dl/extractor/pbs.py | 616 + .../lib/youtube_dl/extractor/pearvideo.py | 63 + .../lib/youtube_dl/extractor/people.py | 32 + .../lib/youtube_dl/extractor/periscope.py | 156 + .../extractor/philharmoniedeparis.py | 78 + .../lib/youtube_dl/extractor/phoenix.py | 45 + .../lib/youtube_dl/extractor/photobucket.py | 46 + .../lib/youtube_dl/extractor/piksel.py | 123 + .../lib/youtube_dl/extractor/pinkbike.py | 97 + .../lib/youtube_dl/extractor/pladform.py | 99 + .../lib/youtube_dl/extractor/playfm.py | 75 + .../lib/youtube_dl/extractor/plays.py | 53 + .../lib/youtube_dl/extractor/playtvak.py | 181 + .../lib/youtube_dl/extractor/playvid.py | 99 + .../lib/youtube_dl/extractor/playwire.py | 75 + .../lib/youtube_dl/extractor/pluralsight.py | 378 + .../lib/youtube_dl/extractor/podomatic.py | 69 + .../lib/youtube_dl/extractor/pokemon.py | 58 + .../lib/youtube_dl/extractor/polskieradio.py | 180 + .../lib/youtube_dl/extractor/porn91.py | 58 + .../lib/youtube_dl/extractor/porncom.py | 100 + .../lib/youtube_dl/extractor/pornflip.py | 92 + 
.../lib/youtube_dl/extractor/pornhd.py | 96 + .../lib/youtube_dl/extractor/pornhub.py | 308 + .../lib/youtube_dl/extractor/pornotube.py | 85 + .../lib/youtube_dl/extractor/pornovoisines.py | 108 + .../lib/youtube_dl/extractor/pornoxo.py | 58 + .../lib/youtube_dl/extractor/presstv.py | 74 + .../lib/youtube_dl/extractor/primesharetv.py | 62 + .../lib/youtube_dl/extractor/promptfile.py | 70 + .../lib/youtube_dl/extractor/prosiebensat1.py | 447 + .../lib/youtube_dl/extractor/puls4.py | 57 + .../lib/youtube_dl/extractor/pyvideo.py | 72 + .../lib/youtube_dl/extractor/qqmusic.py | 361 + .../lib/youtube_dl/extractor/r7.py | 112 + .../lib/youtube_dl/extractor/radiobremen.py | 63 + .../lib/youtube_dl/extractor/radiocanada.py | 176 + .../lib/youtube_dl/extractor/radiode.py | 52 + .../lib/youtube_dl/extractor/radiofrance.py | 59 + .../lib/youtube_dl/extractor/radiojavan.py | 67 + .../lib/youtube_dl/extractor/rai.py | 457 + .../lib/youtube_dl/extractor/rbmaradio.py | 71 + .../lib/youtube_dl/extractor/rds.py | 70 + .../lib/youtube_dl/extractor/redbulltv.py | 139 + .../lib/youtube_dl/extractor/redtube.py | 95 + .../lib/youtube_dl/extractor/regiotv.py | 62 + .../lib/youtube_dl/extractor/rentv.py | 75 + .../lib/youtube_dl/extractor/restudy.py | 41 + .../lib/youtube_dl/extractor/reuters.py | 69 + .../lib/youtube_dl/extractor/reverbnation.py | 53 + .../lib/youtube_dl/extractor/revision3.py | 170 + .../lib/youtube_dl/extractor/rice.py | 116 + .../lib/youtube_dl/extractor/ringtv.py | 44 + .../lib/youtube_dl/extractor/rmcdecouverte.py | 45 + .../lib/youtube_dl/extractor/ro220.py | 43 + .../lib/youtube_dl/extractor/rockstargames.py | 69 + .../lib/youtube_dl/extractor/roosterteeth.py | 148 + .../youtube_dl/extractor/rottentomatoes.py | 32 + .../lib/youtube_dl/extractor/roxwel.py | 53 + .../lib/youtube_dl/extractor/rozhlas.py | 50 + .../lib/youtube_dl/extractor/rtbf.py | 98 + .../lib/youtube_dl/extractor/rte.py | 142 + .../lib/youtube_dl/extractor/rtl2.py | 208 + .../lib/youtube_dl/extractor/rtlnl.py | 170 + .../lib/youtube_dl/extractor/rtp.py | 89 + .../lib/youtube_dl/extractor/rts.py | 230 + .../lib/youtube_dl/extractor/rtve.py | 258 + .../lib/youtube_dl/extractor/rtvnh.py | 62 + .../lib/youtube_dl/extractor/rudo.py | 53 + .../lib/youtube_dl/extractor/ruhd.py | 45 + .../lib/youtube_dl/extractor/ruleporn.py | 44 + .../lib/youtube_dl/extractor/rutube.py | 195 + .../lib/youtube_dl/extractor/rutv.py | 211 + .../lib/youtube_dl/extractor/ruutu.py | 126 + .../lib/youtube_dl/extractor/ruv.py | 101 + .../lib/youtube_dl/extractor/safari.py | 201 + .../lib/youtube_dl/extractor/sandia.py | 65 + .../lib/youtube_dl/extractor/sapo.py | 119 + .../lib/youtube_dl/extractor/savefrom.py | 37 + .../lib/youtube_dl/extractor/sbs.py | 66 + .../lib/youtube_dl/extractor/screencast.py | 114 + .../youtube_dl/extractor/screencastomatic.py | 37 + .../youtube_dl/extractor/scrippsnetworks.py | 60 + .../lib/youtube_dl/extractor/seeker.py | 57 + .../lib/youtube_dl/extractor/senateisvp.py | 153 + .../lib/youtube_dl/extractor/sendtonews.py | 105 + .../lib/youtube_dl/extractor/servingsys.py | 72 + .../lib/youtube_dl/extractor/sexu.py | 63 + .../lib/youtube_dl/extractor/shahid.py | 134 + .../lib/youtube_dl/extractor/shared.py | 96 + .../lib/youtube_dl/extractor/showroomlive.py | 84 + .../lib/youtube_dl/extractor/sina.py | 115 + .../lib/youtube_dl/extractor/sixplay.py | 101 + .../youtube_dl/extractor/skylinewebcams.py | 42 + .../lib/youtube_dl/extractor/skynewsarabia.py | 117 + .../lib/youtube_dl/extractor/skysports.py | 34 + 
.../lib/youtube_dl/extractor/slideshare.py | 56 + .../lib/youtube_dl/extractor/slutload.py | 40 + .../lib/youtube_dl/extractor/smotri.py | 414 + .../lib/youtube_dl/extractor/snotr.py | 73 + .../lib/youtube_dl/extractor/sohu.py | 202 + .../lib/youtube_dl/extractor/sonyliv.py | 34 + .../lib/youtube_dl/extractor/soundcloud.py | 567 + .../lib/youtube_dl/extractor/soundgasm.py | 64 + .../lib/youtube_dl/extractor/southpark.py | 103 + .../lib/youtube_dl/extractor/spankbang.py | 67 + .../lib/youtube_dl/extractor/spankwire.py | 127 + .../lib/youtube_dl/extractor/spiegel.py | 172 + .../lib/youtube_dl/extractor/spiegeltv.py | 17 + .../lib/youtube_dl/extractor/spike.py | 54 + .../lib/youtube_dl/extractor/sport5.py | 92 + .../lib/youtube_dl/extractor/sportbox.py | 79 + .../youtube_dl/extractor/sportdeutschland.py | 100 + .../lib/youtube_dl/extractor/sportschau.py | 38 + .../lib/youtube_dl/extractor/sprout.py | 52 + .../lib/youtube_dl/extractor/srgssr.py | 171 + .../lib/youtube_dl/extractor/srmediathek.py | 59 + .../lib/youtube_dl/extractor/stanfordoc.py | 91 + .../lib/youtube_dl/extractor/steam.py | 123 + .../lib/youtube_dl/extractor/stitcher.py | 81 + .../lib/youtube_dl/extractor/streamable.py | 112 + .../lib/youtube_dl/extractor/streamango.py | 75 + .../lib/youtube_dl/extractor/streamcloud.py | 75 + .../lib/youtube_dl/extractor/streamcz.py | 105 + .../lib/youtube_dl/extractor/streetvoice.py | 49 + .../lib/youtube_dl/extractor/sunporno.py | 79 + .../lib/youtube_dl/extractor/svt.py | 190 + .../lib/youtube_dl/extractor/swrmediathek.py | 115 + .../lib/youtube_dl/extractor/syfy.py | 58 + .../lib/youtube_dl/extractor/sztvhu.py | 41 + .../lib/youtube_dl/extractor/tagesschau.py | 311 + .../lib/youtube_dl/extractor/tass.py | 62 + .../lib/youtube_dl/extractor/tastytrade.py | 43 + .../lib/youtube_dl/extractor/tbs.py | 61 + .../lib/youtube_dl/extractor/tdslifeway.py | 33 + .../lib/youtube_dl/extractor/teachertube.py | 131 + .../youtube_dl/extractor/teachingchannel.py | 35 + .../lib/youtube_dl/extractor/teamcoco.py | 170 + .../lib/youtube_dl/extractor/teamfourstar.py | 48 + .../lib/youtube_dl/extractor/techtalks.py | 82 + .../lib/youtube_dl/extractor/ted.py | 320 + .../lib/youtube_dl/extractor/tele13.py | 88 + .../lib/youtube_dl/extractor/telebruxelles.py | 59 + .../lib/youtube_dl/extractor/telecinco.py | 66 + .../lib/youtube_dl/extractor/telegraaf.py | 78 + .../lib/youtube_dl/extractor/telemb.py | 78 + .../lib/youtube_dl/extractor/telequebec.py | 49 + .../lib/youtube_dl/extractor/teletask.py | 53 + .../lib/youtube_dl/extractor/telewebion.py | 55 + .../lib/youtube_dl/extractor/testurl.py | 68 + .../lib/youtube_dl/extractor/tf1.py | 53 + .../lib/youtube_dl/extractor/tfo.py | 57 + .../lib/youtube_dl/extractor/theintercept.py | 49 + .../lib/youtube_dl/extractor/theplatform.py | 397 + .../lib/youtube_dl/extractor/thescene.py | 44 + .../lib/youtube_dl/extractor/thesixtyone.py | 106 + .../lib/youtube_dl/extractor/thestar.py | 36 + .../lib/youtube_dl/extractor/thesun.py | 32 + .../youtube_dl/extractor/theweatherchannel.py | 79 + .../youtube_dl/extractor/thisamericanlife.py | 40 + .../lib/youtube_dl/extractor/thisav.py | 73 + .../lib/youtube_dl/extractor/thisoldhouse.py | 44 + .../lib/youtube_dl/extractor/threeqsdn.py | 142 + .../lib/youtube_dl/extractor/tinypic.py | 56 + .../lib/youtube_dl/extractor/tmz.py | 56 + .../lib/youtube_dl/extractor/tnaflix.py | 309 + .../lib/youtube_dl/extractor/toggle.py | 198 + .../lib/youtube_dl/extractor/tonline.py | 59 + .../lib/youtube_dl/extractor/toongoggles.py | 81 + 
.../lib/youtube_dl/extractor/toutv.py | 100 + .../lib/youtube_dl/extractor/toypics.py | 90 + .../lib/youtube_dl/extractor/traileraddict.py | 64 + .../lib/youtube_dl/extractor/trilulilu.py | 103 + .../lib/youtube_dl/extractor/trutv.py | 47 + .../lib/youtube_dl/extractor/tube8.py | 80 + .../lib/youtube_dl/extractor/tubitv.py | 90 + .../lib/youtube_dl/extractor/tudou.py | 49 + .../lib/youtube_dl/extractor/tumblr.py | 160 + .../lib/youtube_dl/extractor/tunein.py | 183 + .../lib/youtube_dl/extractor/tunepk.py | 90 + .../lib/youtube_dl/extractor/turbo.py | 68 + .../lib/youtube_dl/extractor/turner.py | 184 + .../lib/youtube_dl/extractor/tutv.py | 35 + .../lib/youtube_dl/extractor/tv2.py | 145 + .../lib/youtube_dl/extractor/tv2hu.py | 62 + .../lib/youtube_dl/extractor/tv3.py | 34 + .../lib/youtube_dl/extractor/tv4.py | 129 + .../lib/youtube_dl/extractor/tv5mondeplus.py | 79 + .../lib/youtube_dl/extractor/tva.py | 54 + .../lib/youtube_dl/extractor/tvanouvelles.py | 65 + .../lib/youtube_dl/extractor/tvc.py | 109 + .../lib/youtube_dl/extractor/tvigle.py | 119 + .../lib/youtube_dl/extractor/tvland.py | 34 + .../lib/youtube_dl/extractor/tvn24.py | 76 + .../lib/youtube_dl/extractor/tvnoe.py | 48 + .../lib/youtube_dl/extractor/tvp.py | 226 + .../lib/youtube_dl/extractor/tvplay.py | 429 + .../lib/youtube_dl/extractor/tvplayer.py | 86 + .../lib/youtube_dl/extractor/tweakers.py | 62 + .../youtube_dl/extractor/twentyfourvideo.py | 110 + .../lib/youtube_dl/extractor/twentymin.py | 91 + .../youtube_dl/extractor/twentytwotracks.py | 86 + .../lib/youtube_dl/extractor/twitch.py | 632 + .../lib/youtube_dl/extractor/twitter.py | 495 + .../lib/youtube_dl/extractor/udemy.py | 403 + .../lib/youtube_dl/extractor/udn.py | 96 + .../lib/youtube_dl/extractor/uktvplay.py | 33 + .../lib/youtube_dl/extractor/unistra.py | 67 + .../lib/youtube_dl/extractor/uol.py | 143 + .../lib/youtube_dl/extractor/uplynk.py | 70 + .../lib/youtube_dl/extractor/upskill.py | 176 + .../lib/youtube_dl/extractor/urort.py | 66 + .../lib/youtube_dl/extractor/urplay.py | 57 + .../lib/youtube_dl/extractor/usanetwork.py | 76 + .../lib/youtube_dl/extractor/usatoday.py | 48 + .../lib/youtube_dl/extractor/ustream.py | 281 + .../lib/youtube_dl/extractor/ustudio.py | 125 + .../lib/youtube_dl/extractor/varzesh3.py | 79 + .../lib/youtube_dl/extractor/vbox7.py | 105 + .../lib/youtube_dl/extractor/veehd.py | 118 + .../lib/youtube_dl/extractor/veoh.py | 127 + .../lib/youtube_dl/extractor/vessel.py | 157 + .../lib/youtube_dl/extractor/vesti.py | 121 + .../lib/youtube_dl/extractor/vevo.py | 370 + .../lib/youtube_dl/extractor/vgtv.py | 296 + .../lib/youtube_dl/extractor/vh1.py | 131 + .../lib/youtube_dl/extractor/vice.py | 274 + .../lib/youtube_dl/extractor/viceland.py | 38 + .../lib/youtube_dl/extractor/vidbit.py | 84 + .../lib/youtube_dl/extractor/viddler.py | 146 + .../lib/youtube_dl/extractor/videa.py | 97 + .../youtube_dl/extractor/videodetective.py | 30 + .../lib/youtube_dl/extractor/videofyme.py | 52 + .../lib/youtube_dl/extractor/videomega.py | 60 + .../lib/youtube_dl/extractor/videomore.py | 227 + .../lib/youtube_dl/extractor/videopremium.py | 46 + .../lib/youtube_dl/extractor/videopress.py | 96 + .../lib/youtube_dl/extractor/vidio.py | 77 + .../lib/youtube_dl/extractor/vidme.py | 273 + .../lib/youtube_dl/extractor/vidzi.py | 61 + .../lib/youtube_dl/extractor/vier.py | 264 + .../lib/youtube_dl/extractor/viewlift.py | 200 + .../lib/youtube_dl/extractor/viewster.py | 217 + .../lib/youtube_dl/extractor/viidea.py | 193 + .../lib/youtube_dl/extractor/viki.py | 
384 + .../lib/youtube_dl/extractor/vimeo.py | 1029 + .../lib/youtube_dl/extractor/vimple.py | 61 + .../lib/youtube_dl/extractor/vine.py | 154 + .../lib/youtube_dl/extractor/viu.py | 256 + .../lib/youtube_dl/extractor/vk.py | 626 + .../lib/youtube_dl/extractor/vlive.py | 318 + .../lib/youtube_dl/extractor/vodlocker.py | 80 + .../lib/youtube_dl/extractor/vodpl.py | 32 + .../lib/youtube_dl/extractor/vodplatform.py | 37 + .../lib/youtube_dl/extractor/voicerepublic.py | 100 + .../lib/youtube_dl/extractor/voxmedia.py | 142 + .../lib/youtube_dl/extractor/vporn.py | 123 + .../lib/youtube_dl/extractor/vrak.py | 80 + .../lib/youtube_dl/extractor/vrt.py | 154 + .../lib/youtube_dl/extractor/vrv.py | 212 + .../lib/youtube_dl/extractor/vshare.py | 38 + .../lib/youtube_dl/extractor/vube.py | 172 + .../lib/youtube_dl/extractor/vuclip.py | 70 + .../lib/youtube_dl/extractor/vvvvid.py | 140 + .../lib/youtube_dl/extractor/vyborymos.py | 55 + .../lib/youtube_dl/extractor/vzaar.py | 55 + .../lib/youtube_dl/extractor/walla.py | 86 + .../youtube_dl/extractor/washingtonpost.py | 183 + .../lib/youtube_dl/extractor/wat.py | 166 + .../youtube_dl/extractor/watchindianporn.py | 68 + .../lib/youtube_dl/extractor/wdr.py | 265 + .../lib/youtube_dl/extractor/webcaster.py | 102 + .../lib/youtube_dl/extractor/webofstories.py | 155 + .../lib/youtube_dl/extractor/weiqitv.py | 52 + .../lib/youtube_dl/extractor/wimp.py | 57 + .../lib/youtube_dl/extractor/wistia.py | 120 + .../youtube_dl/extractor/worldstarhiphop.py | 40 + .../lib/youtube_dl/extractor/wrzuta.py | 158 + .../lib/youtube_dl/extractor/wsj.py | 120 + .../lib/youtube_dl/extractor/xbef.py | 44 + .../lib/youtube_dl/extractor/xboxclips.py | 53 + .../lib/youtube_dl/extractor/xfileshare.py | 201 + .../lib/youtube_dl/extractor/xhamster.py | 233 + .../lib/youtube_dl/extractor/xiami.py | 198 + .../lib/youtube_dl/extractor/xminus.py | 79 + .../lib/youtube_dl/extractor/xnxx.py | 48 + .../lib/youtube_dl/extractor/xstream.py | 119 + .../lib/youtube_dl/extractor/xtube.py | 177 + .../lib/youtube_dl/extractor/xuite.py | 153 + .../lib/youtube_dl/extractor/xvideos.py | 81 + .../lib/youtube_dl/extractor/xxxymovies.py | 81 + .../lib/youtube_dl/extractor/yahoo.py | 435 + .../lib/youtube_dl/extractor/yandexmusic.py | 270 + .../lib/youtube_dl/extractor/yesjapan.py | 62 + .../lib/youtube_dl/extractor/yinyuetai.py | 56 + .../lib/youtube_dl/extractor/ynet.py | 52 + .../lib/youtube_dl/extractor/youjizz.py | 39 + .../lib/youtube_dl/extractor/youku.py | 299 + .../lib/youtube_dl/extractor/youporn.py | 190 + .../lib/youtube_dl/extractor/yourupload.py | 46 + .../lib/youtube_dl/extractor/youtube.py | 2771 +++ .../lib/youtube_dl/extractor/zapiks.py | 110 + .../lib/youtube_dl/extractor/zaq1.py | 101 + .../lib/youtube_dl/extractor/zdf.py | 314 + .../lib/youtube_dl/extractor/zingmp3.py | 143 + .../lib/youtube_dl/jsinterp.py | 262 + RBXLegacyDiscordBot/lib/youtube_dl/options.py | 908 + .../lib/youtube_dl/postprocessor/__init__.py | 40 + .../lib/youtube_dl/postprocessor/common.py | 69 + .../postprocessor/embedthumbnail.py | 92 + .../postprocessor/execafterdownload.py | 31 + .../lib/youtube_dl/postprocessor/ffmpeg.py | 613 + .../postprocessor/metadatafromtitle.py | 48 + .../lib/youtube_dl/postprocessor/xattrpp.py | 77 + RBXLegacyDiscordBot/lib/youtube_dl/socks.py | 273 + .../lib/youtube_dl/swfinterp.py | 834 + RBXLegacyDiscordBot/lib/youtube_dl/update.py | 187 + RBXLegacyDiscordBot/lib/youtube_dl/utils.py | 3824 +++ RBXLegacyDiscordBot/lib/youtube_dl/version.py | 3 + RBXLegacyDiscordBot/red.py | 643 + 
RBXLegacyDiscordBot/requirements.txt | 4 +
RBXLegacyDiscordBot/requirements_no_audio.txt | 4 +
RBXLegacyDiscordBot/start_launcher.bat | 35 +
RBXLegacyDiscordBot/start_red.bat | 3 +
RBXLegacyDiscordBot/start_red_autorestart.bat | 3 +
1417 files changed, 341934 insertions(+)
create mode 100644 RBXLegacyDiscordBot/.gitignore create mode 100644 RBXLegacyDiscordBot/.travis.yml create mode 100644 RBXLegacyDiscordBot/cogs/alias.py create mode 100644 RBXLegacyDiscordBot/cogs/audio.py create mode 100644 RBXLegacyDiscordBot/cogs/customcom.py create mode 100644 RBXLegacyDiscordBot/cogs/downloader.py create mode 100644 RBXLegacyDiscordBot/cogs/economy.py create mode 100644 RBXLegacyDiscordBot/cogs/general.py create mode 100644 RBXLegacyDiscordBot/cogs/image.py create mode 100644 RBXLegacyDiscordBot/cogs/mod.py create mode 100644 RBXLegacyDiscordBot/cogs/owner.py create mode 100644 RBXLegacyDiscordBot/cogs/streams.py create mode 100644 RBXLegacyDiscordBot/cogs/trivia.py create mode 100644 RBXLegacyDiscordBot/cogs/utils/__init__.py create mode 100644 RBXLegacyDiscordBot/cogs/utils/chat_formatting.py create mode 100644 RBXLegacyDiscordBot/cogs/utils/checks.py create mode 100644 RBXLegacyDiscordBot/cogs/utils/dataIO.py create mode 100644 RBXLegacyDiscordBot/cogs/utils/settings.py create mode 100644 RBXLegacyDiscordBot/launcher.py create mode 100644 RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/_cffi_backend.cp36-win32.pyd create mode 100644 RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/PKG-INFO create mode 100644 RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/SOURCES.txt create mode 100644 RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/dependency_links.txt create mode 100644 RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/installed-files.txt create mode 100644 RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/requires.txt create mode 100644 RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/_websocket.c create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/_websocket.cp36-win32.pyd create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/_websocket.pyx create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/_ws_impl.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/abc.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/client.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/client_reqrep.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/client_ws.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/connector.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/cookiejar.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/errors.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/file_sender.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/hdrs.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/helpers.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/log.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/multipart.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/parsers.py create mode 100644 
RBXLegacyDiscordBot/lib/aiohttp/protocol.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/pytest_plugin.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/resolver.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/server.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/signals.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/streams.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/test_utils.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/web.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/web_exceptions.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/web_reqrep.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/web_urldispatcher.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/web_ws.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/worker.py create mode 100644 RBXLegacyDiscordBot/lib/aiohttp/wsgi.py create mode 100644 RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/async_timeout/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/certifi/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/certifi/__main__.py create mode 100644 RBXLegacyDiscordBot/lib/certifi/cacert.pem create mode 100644 RBXLegacyDiscordBot/lib/certifi/core.py create mode 100644 RBXLegacyDiscordBot/lib/certifi/old_root.pem create mode 100644 RBXLegacyDiscordBot/lib/certifi/weak.pem create mode 100644 RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/entry_points.txt create mode 100644 RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/cffi/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/_cffi_include.h create mode 100644 RBXLegacyDiscordBot/lib/cffi/_embedding.h create mode 100644 RBXLegacyDiscordBot/lib/cffi/api.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/backend_ctypes.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/cffi_opcode.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/commontypes.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/cparser.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/error.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/ffiplatform.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/lock.py create mode 100644 
RBXLegacyDiscordBot/lib/cffi/model.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/parse_c_type.h create mode 100644 RBXLegacyDiscordBot/lib/cffi/recompiler.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/setuptools_ext.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/vengine_cpy.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/vengine_gen.py create mode 100644 RBXLegacyDiscordBot/lib/cffi/verifier.py create mode 100644 RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/entry_points.txt create mode 100644 RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/chardet/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/big5freq.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/big5prober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/chardistribution.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/charsetgroupprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/charsetprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/cli/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/cli/chardetect.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/codingstatemachine.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/compat.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/cp949prober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/enums.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/escprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/escsm.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/eucjpprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/euckrfreq.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/euckrprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/euctwfreq.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/euctwprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/gb2312freq.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/gb2312prober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/hebrewprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/jisfreq.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/jpcntx.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/langbulgarianmodel.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/langcyrillicmodel.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/langgreekmodel.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/langhebrewmodel.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/langhungarianmodel.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/langthaimodel.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/langturkishmodel.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/latin1prober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/mbcharsetprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/mbcsgroupprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/mbcssm.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/sbcharsetprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/sbcsgroupprober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/sjisprober.py create mode 100644 
RBXLegacyDiscordBot/lib/chardet/universaldetector.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/utf8prober.py create mode 100644 RBXLegacyDiscordBot/lib/chardet/version.py create mode 100644 RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/PKG-INFO create mode 100644 RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/SOURCES.txt create mode 100644 RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/dependency_links.txt create mode 100644 RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/installed-files.txt create mode 100644 RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/requires.txt create mode 100644 RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/discord/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/discord/calls.py create mode 100644 RBXLegacyDiscordBot/lib/discord/channel.py create mode 100644 RBXLegacyDiscordBot/lib/discord/client.py create mode 100644 RBXLegacyDiscordBot/lib/discord/colour.py create mode 100644 RBXLegacyDiscordBot/lib/discord/compat.py create mode 100644 RBXLegacyDiscordBot/lib/discord/embeds.py create mode 100644 RBXLegacyDiscordBot/lib/discord/emoji.py create mode 100644 RBXLegacyDiscordBot/lib/discord/enums.py create mode 100644 RBXLegacyDiscordBot/lib/discord/errors.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/bot.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/context.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/converter.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/cooldowns.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/core.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/errors.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/formatter.py create mode 100644 RBXLegacyDiscordBot/lib/discord/ext/commands/view.py create mode 100644 RBXLegacyDiscordBot/lib/discord/game.py create mode 100644 RBXLegacyDiscordBot/lib/discord/gateway.py create mode 100644 RBXLegacyDiscordBot/lib/discord/http.py create mode 100644 RBXLegacyDiscordBot/lib/discord/invite.py create mode 100644 RBXLegacyDiscordBot/lib/discord/iterators.py create mode 100644 RBXLegacyDiscordBot/lib/discord/member.py create mode 100644 RBXLegacyDiscordBot/lib/discord/message.py create mode 100644 RBXLegacyDiscordBot/lib/discord/mixins.py create mode 100644 RBXLegacyDiscordBot/lib/discord/object.py create mode 100644 RBXLegacyDiscordBot/lib/discord/opus.py create mode 100644 RBXLegacyDiscordBot/lib/discord/permissions.py create mode 100644 RBXLegacyDiscordBot/lib/discord/reaction.py create mode 100644 RBXLegacyDiscordBot/lib/discord/role.py create mode 100644 RBXLegacyDiscordBot/lib/discord/server.py create mode 100644 RBXLegacyDiscordBot/lib/discord/state.py create mode 100644 RBXLegacyDiscordBot/lib/discord/user.py create mode 100644 RBXLegacyDiscordBot/lib/discord/utils.py create mode 100644 RBXLegacyDiscordBot/lib/discord/voice_client.py create mode 100644 RBXLegacyDiscordBot/lib/idna-2.5.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/idna-2.5.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/idna-2.5.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/idna-2.5.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/idna-2.5.dist-info/WHEEL create mode 100644 
RBXLegacyDiscordBot/lib/idna-2.5.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/idna/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/idna/codec.py create mode 100644 RBXLegacyDiscordBot/lib/idna/compat.py create mode 100644 RBXLegacyDiscordBot/lib/idna/core.py create mode 100644 RBXLegacyDiscordBot/lib/idna/idnadata.py create mode 100644 RBXLegacyDiscordBot/lib/idna/intranges.py create mode 100644 RBXLegacyDiscordBot/lib/idna/uts46data.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/PKG-INFO create mode 100644 RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/SOURCES.txt create mode 100644 RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/dependency_links.txt create mode 100644 RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/installed-files.txt create mode 100644 RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/requires.txt create mode 100644 RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/client.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/helpers/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/helpers/error.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/helpers/format.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/account.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/account_settings.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/album.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/comment.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/conversation.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/custom_gallery.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/gallery_album.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/gallery_image.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/image.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/message.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/notification.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/tag.py create mode 100644 RBXLegacyDiscordBot/lib/imgurpython/imgur/models/tag_vote.py create mode 100644 RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/multidict/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/multidict/__init__.pyi create mode 100644 RBXLegacyDiscordBot/lib/multidict/_istr.c create mode 100644 RBXLegacyDiscordBot/lib/multidict/_istr.cp36-win32.pyd create mode 100644 RBXLegacyDiscordBot/lib/multidict/_multidict.c create mode 100644 RBXLegacyDiscordBot/lib/multidict/_multidict.cp36-win32.pyd create mode 100644 RBXLegacyDiscordBot/lib/multidict/_multidict.pyx create mode 100644 
RBXLegacyDiscordBot/lib/multidict/_multidict_py.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/_sodium.cp36-win32.pyd create mode 100644 RBXLegacyDiscordBot/lib/nacl/bindings/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/bindings/crypto_box.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/bindings/crypto_hash.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/bindings/crypto_scalarmult.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/bindings/crypto_secretbox.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/bindings/crypto_sign.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/bindings/randombytes.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/bindings/sodium_core.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/encoding.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/exceptions.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/hash.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/public.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/secret.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/signing.py create mode 100644 RBXLegacyDiscordBot/lib/nacl/utils.py create mode 100644 RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/entry_points.txt create mode 100644 RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/pip/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/__main__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/appdirs.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/_cmd.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/adapter.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/cache.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/file_cache.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/redis_cache.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/compat.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/controller.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/filewrapper.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/heuristics.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/serialize.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/wrapper.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/colorama/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/colorama/ansi.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/colorama/ansitowin32.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/colorama/initialise.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/colorama/win32.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/colorama/winterm.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/__init__.py create mode 
100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/misc.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/shutil.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/sysconfig.cfg create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/sysconfig.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/tarfile.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/compat.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/database.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/index.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/locators.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/manifest.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/markers.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/metadata.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/resources.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/scripts.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/util.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/version.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distlib/wheel.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/distro.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_ihatexml.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_inputstream.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_tokenizer.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/_base.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/datrie.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/py.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_utils.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/constants.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/alphabeticalattributes.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/base.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/inject_meta_charset.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/lint.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/optionaltags.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/sanitizer.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/whitespace.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/html5parser.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/serializer.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treeadapters/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treeadapters/genshi.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treeadapters/sax.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treebuilders/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treebuilders/base.py create mode 100644 
RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treebuilders/dom.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treebuilders/etree.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treebuilders/etree_lxml.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treewalkers/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treewalkers/base.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treewalkers/dom.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treewalkers/etree.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treewalkers/etree_lxml.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/treewalkers/genshi.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/ipaddress.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/lockfile/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/lockfile/linklockfile.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/lockfile/mkdirlockfile.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/lockfile/pidlockfile.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/lockfile/sqlitelockfile.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/lockfile/symlinklockfile.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/ordereddict.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/__about__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/_compat.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/_structures.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/markers.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/requirements.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/specifiers.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/utils.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/packaging/version.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/pkg_resources/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/progress/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/progress/bar.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/progress/counter.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/progress/helpers.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/progress/spinner.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/pyparsing.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/re-vendor.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/adapters.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/api.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/auth.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/cacert.pem create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/certs.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/compat.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/cookies.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/exceptions.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/hooks.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/models.py create mode 100644 
RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/big5freq.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/big5prober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/chardetect.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/chardistribution.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/charsetgroupprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/charsetprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/codingstatemachine.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/compat.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/constants.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/cp949prober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/escprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/escsm.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/eucjpprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/euckrfreq.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/euckrprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/euctwfreq.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/euctwprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/gb2312freq.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/gb2312prober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/hebrewprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/jisfreq.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/jpcntx.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/langbulgarianmodel.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/langcyrillicmodel.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/langgreekmodel.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/langhebrewmodel.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/langhungarianmodel.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/langthaimodel.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/latin1prober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/mbcharsetprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/mbcsgroupprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/mbcssm.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/sbcharsetprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/sbcsgroupprober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/sjisprober.py create 
mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/universaldetector.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/utf8prober.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/_collections.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/connection.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/connectionpool.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/contrib/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/contrib/appengine.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/contrib/socks.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/exceptions.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/fields.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/filepost.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/packages/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/packages/six.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/poolmanager.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/request.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/response.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/util/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/util/connection.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/util/request.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/util/response.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/util/retry.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/util/ssl_.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/util/timeout.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/urllib3/util/url.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/sessions.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/status_codes.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/structures.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/requests/utils.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/retrying.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/six.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/webencodings/__init__.py create mode 100644 
RBXLegacyDiscordBot/lib/pip/_vendor/webencodings/labels.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/webencodings/mklabels.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/webencodings/tests.py create mode 100644 RBXLegacyDiscordBot/lib/pip/_vendor/webencodings/x_user_defined.py create mode 100644 RBXLegacyDiscordBot/lib/pip/basecommand.py create mode 100644 RBXLegacyDiscordBot/lib/pip/baseparser.py create mode 100644 RBXLegacyDiscordBot/lib/pip/cmdoptions.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/check.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/completion.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/download.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/freeze.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/hash.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/help.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/install.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/list.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/search.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/show.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/uninstall.py create mode 100644 RBXLegacyDiscordBot/lib/pip/commands/wheel.py create mode 100644 RBXLegacyDiscordBot/lib/pip/compat/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/compat/dictconfig.py create mode 100644 RBXLegacyDiscordBot/lib/pip/download.py create mode 100644 RBXLegacyDiscordBot/lib/pip/exceptions.py create mode 100644 RBXLegacyDiscordBot/lib/pip/index.py create mode 100644 RBXLegacyDiscordBot/lib/pip/locations.py create mode 100644 RBXLegacyDiscordBot/lib/pip/models/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/models/index.py create mode 100644 RBXLegacyDiscordBot/lib/pip/operations/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/operations/check.py create mode 100644 RBXLegacyDiscordBot/lib/pip/operations/freeze.py create mode 100644 RBXLegacyDiscordBot/lib/pip/pep425tags.py create mode 100644 RBXLegacyDiscordBot/lib/pip/req/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/req/req_file.py create mode 100644 RBXLegacyDiscordBot/lib/pip/req/req_install.py create mode 100644 RBXLegacyDiscordBot/lib/pip/req/req_set.py create mode 100644 RBXLegacyDiscordBot/lib/pip/req/req_uninstall.py create mode 100644 RBXLegacyDiscordBot/lib/pip/status_codes.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/appdirs.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/build.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/deprecation.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/encoding.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/filesystem.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/glibc.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/hashes.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/logging.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/outdated.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/packaging.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/setuptools_build.py create mode 100644 RBXLegacyDiscordBot/lib/pip/utils/ui.py create mode 100644 RBXLegacyDiscordBot/lib/pip/vcs/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pip/vcs/bazaar.py create mode 100644 RBXLegacyDiscordBot/lib/pip/vcs/git.py create mode 100644 
RBXLegacyDiscordBot/lib/pip/vcs/mercurial.py create mode 100644 RBXLegacyDiscordBot/lib/pip/vcs/subversion.py create mode 100644 RBXLegacyDiscordBot/lib/pip/wheel.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser-2.18-py3.6.egg-info/PKG-INFO create mode 100644 RBXLegacyDiscordBot/lib/pycparser-2.18-py3.6.egg-info/SOURCES.txt create mode 100644 RBXLegacyDiscordBot/lib/pycparser-2.18-py3.6.egg-info/dependency_links.txt create mode 100644 RBXLegacyDiscordBot/lib/pycparser-2.18-py3.6.egg-info/installed-files.txt create mode 100644 RBXLegacyDiscordBot/lib/pycparser-2.18-py3.6.egg-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/pycparser/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/_ast_gen.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/_build_tables.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/_c_ast.cfg create mode 100644 RBXLegacyDiscordBot/lib/pycparser/ast_transforms.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/c_ast.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/c_generator.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/c_lexer.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/c_parser.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/lextab.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/ply/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/ply/cpp.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/ply/ctokens.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/ply/lex.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/ply/yacc.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/ply/ygen.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/plyparser.py create mode 100644 RBXLegacyDiscordBot/lib/pycparser/yacctab.py create mode 100644 RBXLegacyDiscordBot/lib/requests-2.18.1.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/requests-2.18.1.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/requests-2.18.1.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/requests-2.18.1.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/requests-2.18.1.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/requests-2.18.1.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/requests/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/requests/__version__.py create mode 100644 RBXLegacyDiscordBot/lib/requests/_internal_utils.py create mode 100644 RBXLegacyDiscordBot/lib/requests/adapters.py create mode 100644 RBXLegacyDiscordBot/lib/requests/api.py create mode 100644 RBXLegacyDiscordBot/lib/requests/auth.py create mode 100644 RBXLegacyDiscordBot/lib/requests/certs.py create mode 100644 RBXLegacyDiscordBot/lib/requests/compat.py create mode 100644 RBXLegacyDiscordBot/lib/requests/cookies.py create mode 100644 RBXLegacyDiscordBot/lib/requests/exceptions.py create mode 100644 RBXLegacyDiscordBot/lib/requests/help.py create mode 100644 RBXLegacyDiscordBot/lib/requests/hooks.py create mode 100644 RBXLegacyDiscordBot/lib/requests/models.py create mode 100644 RBXLegacyDiscordBot/lib/requests/packages.py create mode 100644 RBXLegacyDiscordBot/lib/requests/sessions.py create mode 100644 RBXLegacyDiscordBot/lib/requests/status_codes.py create mode 100644 RBXLegacyDiscordBot/lib/requests/structures.py create mode 100644 RBXLegacyDiscordBot/lib/requests/utils.py create mode 100644 RBXLegacyDiscordBot/lib/six-1.10.0.dist-info/DESCRIPTION.rst create mode 100644 
RBXLegacyDiscordBot/lib/six-1.10.0.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/six-1.10.0.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/six-1.10.0.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/six-1.10.0.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/six-1.10.0.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/six.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3-1.21.1.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/urllib3-1.21.1.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/urllib3-1.21.1.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/urllib3-1.21.1.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/urllib3-1.21.1.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/urllib3-1.21.1.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/urllib3/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/_collections.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/connection.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/connectionpool.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/_securetransport/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/_securetransport/bindings.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/_securetransport/low_level.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/appengine.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/ntlmpool.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/pyopenssl.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/securetransport.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/contrib/socks.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/exceptions.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/fields.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/filepost.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/packages/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/packages/backports/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/packages/backports/makefile.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/packages/ordered_dict.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/packages/six.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/poolmanager.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/request.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/response.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/connection.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/request.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/response.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/retry.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/selectors.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/ssl_.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/timeout.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/url.py create mode 100644 RBXLegacyDiscordBot/lib/urllib3/util/wait.py create mode 100644 RBXLegacyDiscordBot/lib/websockets-3.3.dist-info/DESCRIPTION.rst create mode 100644 
RBXLegacyDiscordBot/lib/websockets-3.3.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/websockets-3.3.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/websockets-3.3.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/websockets-3.3.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/websockets-3.3.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/websockets/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/client.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/compatibility.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/exceptions.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/framing.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/handshake.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/http.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/protocol.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/py35/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/py35/client.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/py35/client_server.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/server.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/test_client_server.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/test_framing.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/test_handshake.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/test_http.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/test_protocol.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/test_uri.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/uri.py create mode 100644 RBXLegacyDiscordBot/lib/websockets/version.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl-2017.7.23.dist-info/DESCRIPTION.rst create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl-2017.7.23.dist-info/INSTALLER create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl-2017.7.23.dist-info/METADATA create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl-2017.7.23.dist-info/RECORD create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl-2017.7.23.dist-info/WHEEL create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl-2017.7.23.dist-info/entry_points.txt create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl-2017.7.23.dist-info/top_level.txt create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/YoutubeDL.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/__main__.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/aes.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/cache.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/compat.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/common.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/dash.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/external.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/f4m.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/fragment.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/hls.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/http.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/ism.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/rtmp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/downloader/rtsp.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/abc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/abcnews.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/abcotvs.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/academicearth.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/acast.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/addanime.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/adn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/adobepass.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/adobetv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/adultswim.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/aenetworks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/afreecatv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/airmozilla.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/aljazeera.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/allocine.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/alphaporno.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/amcnetworks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/amp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/animeondemand.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/anitube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/anvato.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/anysex.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/aol.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/aparat.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/appleconnect.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/appletrailers.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/archiveorg.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ard.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/arkena.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/arte.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/asiancrush.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/atresplayer.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/atttechchannel.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/atvat.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/audimedia.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/audioboom.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/audiomack.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/awaan.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/azmedien.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/baidu.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bambuser.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bandcamp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bbc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/beampro.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/beatport.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/beeg.py create mode 
100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/behindkink.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bellmedia.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bet.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bigflix.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bild.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bilibili.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/biobiochiletv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/biqle.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bleacherreport.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/blinkx.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bloomberg.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bokecc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bostonglobe.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bpb.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/br.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/bravotv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/breakcom.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/brightcove.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/buzzfeed.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/byutv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/c56.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/camdemy.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/camwithher.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/canalc2.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/canalplus.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/canvas.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/carambatv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cartoonnetwork.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cbc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cbs.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cbsinteractive.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cbslocal.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cbsnews.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cbssports.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ccc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ccma.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cctv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cda.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ceskatelevize.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/channel9.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/charlierose.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/chaturbate.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/chilloutzone.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/chirbit.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cinchcast.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cjsw.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/clipfish.py create mode 
100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cliphunter.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cliprs.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/clipsyndicate.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/closertotruth.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cloudy.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/clubic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/clyp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cmt.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cnbc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cnn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/collegerama.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/comcarcoff.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/comedycentral.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/common.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/commonmistakes.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/commonprotocols.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/condenast.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/corus.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/coub.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cracked.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/crackle.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/criterion.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/crooksandliars.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/crunchyroll.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cspan.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ctsnews.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ctvnews.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cultureunplugged.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/curiositystream.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/cwtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dailymail.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dailymotion.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/daisuki.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/daum.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dbtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dctp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/deezer.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/defense.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/democracynow.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dfb.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dhm.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/digiteka.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/discovery.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/discoverygo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/discoverynetworks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/discoveryvr.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/disney.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dispeak.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dotsub.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/douyutv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dplay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dramafever.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/drbonanza.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dreisat.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dropbox.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/drtuber.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/drtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dumpert.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dvtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/dw.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/eagleplatform.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ebaumsworld.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/echomsk.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/egghead.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ehow.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/eighttracks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/einthusan.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/eitb.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ellentv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/elpais.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/embedly.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/engadget.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/eporner.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/eroprofile.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/escapist.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/espn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/esri.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/etonline.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/europa.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/everyonesmixtape.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/expotv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/extractors.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/extremetube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/eyedotv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/facebook.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/faz.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fc2.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fczenit.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/filmon.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/firstpost.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/firsttv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fivemin.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fivetv.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/fktv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/flickr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/flipagram.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/folketinget.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/footyroom.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/formula1.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fourtube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fox.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fox9.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/foxgay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/foxnews.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/foxsports.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/franceculture.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/franceinter.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/francetv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/freesound.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/freespeech.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/freshlive.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/funimation.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/funnyordie.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fusion.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/fxnetworks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gameinformer.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gameone.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gamersyde.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gamespot.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gamestar.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gaskrank.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gazeta.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gdcvault.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/generic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gfycat.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/giantbomb.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/giga.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/glide.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/globo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/go.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/go90.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/godtube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/golem.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/googledrive.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/googleplus.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/googlesearch.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/goshgay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/gputechconf.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/groupon.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hark.py create mode 
100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hbo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hearthisat.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/heise.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hellporno.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/helsinki.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hentaistigma.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hgtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/historicfilms.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hitbox.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hitrecord.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hornbunny.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hotnewhiphop.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hotstar.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/howcast.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/howstuffworks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hrti.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/huajiao.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/huffpost.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/hypem.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/iconosquare.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ign.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/imdb.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/imgur.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ina.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/inc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/indavideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/infoq.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/instagram.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/internetvideoarchive.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/iprima.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/iqiyi.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ir90tv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/itv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ivi.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ivideon.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/iwara.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/izlesene.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/jamendo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/jeuxvideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/joj.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/jove.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/jpopsukitv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/jwplatform.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/kaltura.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/kamcord.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/kanalplay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/kankan.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/karaoketv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/karrierevideos.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/keek.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/keezmovies.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ketnet.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/khanacademy.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/kickstarter.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/konserthusetplay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/kontrtube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/krasview.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ku6.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/kusi.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/kuwo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/la7.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/laola1tv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lci.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lcp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/learnr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lecture2go.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/leeco.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lego.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lemonde.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/libraryofcongress.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/libsyn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lifenews.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/limelight.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/litv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/liveleak.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/livestream.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lnkgo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/localnews8.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lovehomeporn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lrt.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/lynda.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/m6.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/macgamestore.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mailru.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/makerschannel.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/makertv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mangomolo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/matchtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mdr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/medialaan.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mediaset.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/medici.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/meipai.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/melonvod.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/meta.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/metacafe.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/metacritic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mgoon.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mgtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/miaopai.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/microsoftvirtualacademy.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/minhateca.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ministrygrid.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/minoto.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/miomio.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mit.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mitele.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mixcloud.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mlb.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mnet.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/moevideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mofosex.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mojvideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/moniker.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/morningstar.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/motherless.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/motorsport.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/movieclips.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/moviezine.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/movingimage.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mpora.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/msn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/muenchentv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/musicplayon.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/mwave.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/myspace.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/myspass.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/myvi.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/myvideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/myvidster.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nationalgeographic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/naver.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nba.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nbc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ndr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ndtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nerdcubed.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/neteasemusic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/netzkino.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/newgrounds.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/newstube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nextmedia.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nexx.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nfb.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nfl.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nhk.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nhl.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nick.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/niconico.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ninecninemedia.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ninegag.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ninenow.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nintendo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/njpwworld.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nobelprize.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/noco.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nonktube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/noovo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/normalboots.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nosvideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nova.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/novamov.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nowness.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nowtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/noz.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/npo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/npr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nrk.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ntvde.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ntvru.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nuevo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nuvid.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nytimes.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/nzz.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/odatv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/odnoklassniki.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/oktoberfesttv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/once.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ondemandkorea.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/onet.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/onionstudios.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ooyala.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/openload.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ora.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/orf.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/packtpub.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pandatv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pandoratv.py 
create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/parliamentliveuk.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/patreon.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pbs.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pearvideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/people.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/periscope.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/philharmoniedeparis.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/phoenix.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/photobucket.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/piksel.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pinkbike.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pladform.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/playfm.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/plays.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/playtvak.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/playvid.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/playwire.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pluralsight.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/podomatic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pokemon.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/polskieradio.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/porn91.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/porncom.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pornflip.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pornhd.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pornhub.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pornotube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pornovoisines.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pornoxo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/presstv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/primesharetv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/promptfile.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/prosiebensat1.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/puls4.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/pyvideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/qqmusic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/r7.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/radiobremen.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/radiocanada.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/radiode.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/radiofrance.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/radiojavan.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rai.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rbmaradio.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rds.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/redbulltv.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/redtube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/regiotv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rentv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/restudy.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/reuters.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/reverbnation.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/revision3.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rice.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ringtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rmcdecouverte.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ro220.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rockstargames.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/roosterteeth.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rottentomatoes.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/roxwel.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rozhlas.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rtbf.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rte.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rtl2.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rtlnl.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rtp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rts.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rtve.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rtvnh.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rudo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ruhd.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ruleporn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rutube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/rutv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ruutu.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ruv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/safari.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sandia.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sapo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/savefrom.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sbs.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/screencast.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/screencastomatic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/scrippsnetworks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/seeker.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/senateisvp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sendtonews.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/servingsys.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sexu.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/shahid.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/shared.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/showroomlive.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/sina.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sixplay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/skylinewebcams.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/skynewsarabia.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/skysports.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/slideshare.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/slutload.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/smotri.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/snotr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sohu.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sonyliv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/soundcloud.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/soundgasm.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/southpark.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/spankbang.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/spankwire.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/spiegel.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/spiegeltv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/spike.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sport5.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sportbox.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sportdeutschland.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sportschau.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sprout.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/srgssr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/srmediathek.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/stanfordoc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/steam.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/stitcher.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/streamable.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/streamango.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/streamcloud.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/streamcz.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/streetvoice.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sunporno.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/svt.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/swrmediathek.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/syfy.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/sztvhu.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tagesschau.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tass.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tastytrade.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tbs.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tdslifeway.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/teachertube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/teachingchannel.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/teamcoco.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/teamfourstar.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/techtalks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ted.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tele13.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/telebruxelles.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/telecinco.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/telegraaf.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/telemb.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/telequebec.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/teletask.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/telewebion.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/testurl.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tf1.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tfo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/theintercept.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/theplatform.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/thescene.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/thesixtyone.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/thestar.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/thesun.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/theweatherchannel.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/thisamericanlife.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/thisav.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/thisoldhouse.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/threeqsdn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tinypic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tmz.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tnaflix.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/toggle.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tonline.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/toongoggles.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/toutv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/toypics.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/traileraddict.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/trilulilu.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/trutv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tube8.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tubitv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tudou.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tumblr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tunein.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tunepk.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/turbo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/turner.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tutv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tv2.py create mode 
100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tv2hu.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tv3.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tv4.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tv5mondeplus.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tva.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvanouvelles.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvc.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvigle.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvland.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvn24.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvnoe.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvplay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tvplayer.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/tweakers.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/twentyfourvideo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/twentymin.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/twentytwotracks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/twitch.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/twitter.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/udemy.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/udn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/uktvplay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/unistra.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/uol.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/uplynk.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/upskill.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/urort.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/urplay.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/usanetwork.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/usatoday.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ustream.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ustudio.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/varzesh3.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vbox7.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/veehd.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/veoh.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vessel.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vesti.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vevo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vgtv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vh1.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vice.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/viceland.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vidbit.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/viddler.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/videa.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/videodetective.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/videofyme.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/videomega.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/videomore.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/videopremium.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/videopress.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vidio.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vidme.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vidzi.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vier.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/viewlift.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/viewster.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/viidea.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/viki.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vimeo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vimple.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vine.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/viu.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vk.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vlive.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vodlocker.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vodpl.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vodplatform.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/voicerepublic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/voxmedia.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vporn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vrak.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vrt.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vrv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vshare.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vuclip.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vvvvid.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vyborymos.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/vzaar.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/walla.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/washingtonpost.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/wat.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/watchindianporn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/wdr.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/webcaster.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/webofstories.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/weiqitv.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/wimp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/wistia.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/worldstarhiphop.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/wrzuta.py create mode 100644 
RBXLegacyDiscordBot/lib/youtube_dl/extractor/wsj.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xbef.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xboxclips.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xfileshare.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xhamster.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xiami.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xminus.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xnxx.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xstream.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xtube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xuite.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xvideos.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/xxxymovies.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/yahoo.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/yandexmusic.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/yesjapan.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/yinyuetai.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/ynet.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/youjizz.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/youku.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/youporn.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/yourupload.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/youtube.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/zapiks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/zaq1.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/zdf.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/extractor/zingmp3.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/jsinterp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/options.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/postprocessor/__init__.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/postprocessor/common.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/postprocessor/embedthumbnail.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/postprocessor/execafterdownload.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/postprocessor/ffmpeg.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/postprocessor/metadatafromtitle.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/postprocessor/xattrpp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/socks.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/swfinterp.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/update.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/utils.py create mode 100644 RBXLegacyDiscordBot/lib/youtube_dl/version.py create mode 100644 RBXLegacyDiscordBot/red.py create mode 100644 RBXLegacyDiscordBot/requirements.txt create mode 100644 RBXLegacyDiscordBot/requirements_no_audio.txt create mode 100644 RBXLegacyDiscordBot/start_launcher.bat create mode 100644 RBXLegacyDiscordBot/start_red.bat create mode 100644 RBXLegacyDiscordBot/start_red_autorestart.bat diff --git a/RBXLegacyDiscordBot/.gitignore b/RBXLegacyDiscordBot/.gitignore new file mode 100644 index 0000000..c39171c --- /dev/null +++ b/RBXLegacyDiscordBot/.gitignore @@ -0,0 +1,8 
@@
+*.json
+*.pyc
+__pycache__
+data
+!data/trivia/*
+!data/audio/playlists/*
+*.exe
+*.dll
\ No newline at end of file
diff --git a/RBXLegacyDiscordBot/.travis.yml b/RBXLegacyDiscordBot/.travis.yml
new file mode 100644
index 0000000..f4c4d56
--- /dev/null
+++ b/RBXLegacyDiscordBot/.travis.yml
@@ -0,0 +1,12 @@
+language: python
+python:
+  - "3.5.2"
+install:
+  - pip install -r requirements.txt
+script:
+  - python -m compileall ./red.py
+  - python -m compileall ./cogs
+  - python ./red.py --no-prompt --no-cogs --dry-run
+cache: pip
+notifications:
+  email: false
diff --git a/RBXLegacyDiscordBot/cogs/alias.py b/RBXLegacyDiscordBot/cogs/alias.py
new file mode 100644
index 0000000..6c23fcb
--- /dev/null
+++ b/RBXLegacyDiscordBot/cogs/alias.py
@@ -0,0 +1,191 @@
+from discord.ext import commands
+from .utils.chat_formatting import box
+from .utils.dataIO import dataIO
+from .utils import checks
+from __main__ import user_allowed, send_cmd_help
+from copy import deepcopy
+import os
+import discord
+
+
+class Alias:
+    def __init__(self, bot):
+        self.bot = bot
+        self.file_path = "data/alias/aliases.json"
+        self.aliases = dataIO.load_json(self.file_path)
+        self.remove_old()
+
+    @commands.group(pass_context=True, no_pm=True)
+    async def alias(self, ctx):
+        """Manage per-server aliases for commands"""
+        if ctx.invoked_subcommand is None:
+            await send_cmd_help(ctx)
+
+    @alias.command(name="add", pass_context=True, no_pm=True)
+    @checks.mod_or_permissions(manage_server=True)
+    async def _add_alias(self, ctx, command, *, to_execute):
+        """Add an alias for a command
+
+        Example: !alias add test flip @Twentysix"""
+        server = ctx.message.server
+        command = command.lower()
+        if len(command.split(" ")) != 1:
+            await self.bot.say("I can't safely do multi-word aliases because"
+                               " of the fact that I allow arguments to"
+                               " aliases. It sucks, I know, deal with it.")
+            return
+        if self.part_of_existing_command(command, server.id):
+            await self.bot.say('I can\'t safely add an alias that starts with '
+                               'an existing command or alias. Sry <3')
+            return
+        prefix = self.get_prefix(server, to_execute)
+        if prefix is not None:
+            to_execute = to_execute[len(prefix):]
+        if server.id not in self.aliases:
+            self.aliases[server.id] = {}
+        if command not in self.bot.commands:
+            self.aliases[server.id][command] = to_execute
+            dataIO.save_json(self.file_path, self.aliases)
+            await self.bot.say("Alias '{}' added.".format(command))
+        else:
+            await self.bot.say("Cannot add '{}' because it's a real bot "
+                               "command.".format(command))
+
+    @alias.command(name="help", pass_context=True, no_pm=True)
+    async def _help_alias(self, ctx, command):
+        """Tries to execute help for the base command of the alias"""
+        server = ctx.message.server
+        if server.id in self.aliases:
+            server_aliases = self.aliases[server.id]
+            if command in server_aliases:
+                help_cmd = server_aliases[command].split(" ")[0]
+                new_content = self.bot.settings.get_prefixes(server)[0]
+                new_content += "help "
+                new_content += help_cmd[len(self.get_prefix(server,
+                                                            help_cmd)):]
+                message = ctx.message
+                message.content = new_content
+                await self.bot.process_commands(message)
+            else:
+                await self.bot.say("That alias doesn't exist.")
+
+    @alias.command(name="show", pass_context=True, no_pm=True)
+    async def _show_alias(self, ctx, command):
+        """Shows what command the alias executes."""
+        server = ctx.message.server
+        if server.id in self.aliases:
+            server_aliases = self.aliases[server.id]
+            if command in server_aliases:
+                await self.bot.say(box(server_aliases[command]))
+            else:
+                await self.bot.say("That alias doesn't exist.")
+
+    @alias.command(name="del", pass_context=True, no_pm=True)
+    @checks.mod_or_permissions(manage_server=True)
+    async def _del_alias(self, ctx, command):
+        """Deletes an alias"""
+        command = command.lower()
+        server = ctx.message.server
+        if server.id in self.aliases:
+            self.aliases[server.id].pop(command, None)
+            dataIO.save_json(self.file_path, self.aliases)
+        await self.bot.say("Alias '{}' deleted.".format(command))
+
+    @alias.command(name="list", pass_context=True, no_pm=True)
+    async def _alias_list(self, ctx):
+        """Lists aliases available on this server
+
+        Responds in DM"""
+        server = ctx.message.server
+        if server.id in self.aliases:
+            message = "```Alias list:\n"
+            for alias in sorted(self.aliases[server.id]):
+                if len(message) + len(alias) + 3 > 2000:
+                    await self.bot.whisper(message)
+                    message = "```\n"
+                message += "\t{}\n".format(alias)
+            if message != "```Alias list:\n":
+                message += "```"
+                await self.bot.whisper(message)
+        else:
+            await self.bot.say("There are no aliases on this server.")
+
+    async def on_message(self, message):
+        if len(message.content) < 2 or message.channel.is_private:
+            return
+
+        msg = message.content
+        server = message.server
+        prefix = self.get_prefix(server, msg)
+
+        if not prefix:
+            return
+
+        if server.id in self.aliases and user_allowed(message):
+            alias = self.first_word(msg[len(prefix):]).lower()
+            if alias in self.aliases[server.id]:
+                new_command = self.aliases[server.id][alias]
+                args = message.content[len(prefix + alias):]
+                new_message = deepcopy(message)
+                new_message.content = prefix + new_command + args
+                await self.bot.process_commands(new_message)
+
+    def part_of_existing_command(self, alias, server):
+        '''Command or alias'''
+        for command in self.bot.commands:
+            if alias.lower() == command.lower():
+                return True
+        return False
+
+    def remove_old(self):
+        for sid in self.aliases:
+            to_delete = []
+            to_add = []
+            for aliasname, alias in self.aliases[sid].items():
+                lower = aliasname.lower()
+                if aliasname != lower:
aliasname != lower: + to_delete.append(aliasname) + to_add.append((lower, alias)) + if aliasname != self.first_word(aliasname): + to_delete.append(aliasname) + continue + server = discord.Object(id=sid) + prefix = self.get_prefix(server, alias) + if prefix is not None: + self.aliases[sid][aliasname] = alias[len(prefix):] + for alias in to_delete: # Fixes caps and bad prefixes + del self.aliases[sid][alias] + for alias, command in to_add: # For fixing caps + self.aliases[sid][alias] = command + dataIO.save_json(self.file_path, self.aliases) + + def first_word(self, msg): + return msg.split(" ")[0] + + def get_prefix(self, server, msg): + prefixes = self.bot.settings.get_prefixes(server) + for p in prefixes: + if msg.startswith(p): + return p + return None + + +def check_folder(): + if not os.path.exists("data/alias"): + print("Creating data/alias folder...") + os.makedirs("data/alias") + + +def check_file(): + aliases = {} + + f = "data/alias/aliases.json" + if not dataIO.is_valid_json(f): + print("Creating default alias's aliases.json...") + dataIO.save_json(f, aliases) + + +def setup(bot): + check_folder() + check_file() + bot.add_cog(Alias(bot)) diff --git a/RBXLegacyDiscordBot/cogs/audio.py b/RBXLegacyDiscordBot/cogs/audio.py new file mode 100644 index 0000000..3452d9e --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/audio.py @@ -0,0 +1,2251 @@ +import discord +from discord.ext import commands +import threading +import os +from random import shuffle, choice +from cogs.utils.dataIO import dataIO +from cogs.utils import checks +from cogs.utils.chat_formatting import pagify +from urllib.parse import urlparse +from __main__ import send_cmd_help, settings +from json import JSONDecodeError +import re +import logging +import collections +import copy +import asyncio +import math +import time +import inspect +import subprocess + +__author__ = "tekulvw" +__version__ = "0.1.1" + +log = logging.getLogger("red.audio") + +try: + import youtube_dl +except: + youtube_dl = None + +try: + if not discord.opus.is_loaded(): + discord.opus.load_opus('libopus-0.dll') +except OSError: # Incorrect bitness + opus = False +except: # Missing opus + opus = None +else: + opus = True + +youtube_dl_options = { + 'source_address': '0.0.0.0', + 'format': 'bestaudio/best', + 'extractaudio': True, + 'audioformat': "mp3", + 'outtmpl': '%(id)s', + 'nocheckcertificate': True, + 'ignoreerrors': True, + 'quiet': True, + 'no_warnings': True, + 'outtmpl': "data/audio/cache/%(id)s", + 'default_search': 'auto' +} + + +class MaximumLength(Exception): + def __init__(self, m): + self.message = m + + def __str__(self): + return self.message + + +class NotConnected(Exception): + pass + + +class AuthorNotConnected(NotConnected): + pass + + +class VoiceNotConnected(NotConnected): + pass + + +class UnauthorizedConnect(Exception): + pass + + +class UnauthorizedSpeak(Exception): + pass + + +class ChannelUserLimit(Exception): + pass + + +class UnauthorizedSave(Exception): + pass + + +class ConnectTimeout(NotConnected): + pass + + +class InvalidURL(Exception): + pass + + +class InvalidSong(InvalidURL): + pass + + +class InvalidPlaylist(InvalidSong): + pass + + +class deque(collections.deque): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def peek(self): + ret = self.pop() + self.append(ret) + return copy.deepcopy(ret) + + def peekleft(self): + ret = self.popleft() + self.appendleft(ret) + return copy.deepcopy(ret) + + +class Song: + def __init__(self, **kwargs): + self.__dict__ = kwargs + self.title = 
kwargs.pop('title', None) + self.id = kwargs.pop('id', None) + self.url = kwargs.pop('url', None) + self.webpage_url = kwargs.pop('webpage_url', "") + self.duration = kwargs.pop('duration', 60) + + +class Playlist: + def __init__(self, server=None, sid=None, name=None, author=None, url=None, + playlist=None, path=None, main_class=None, **kwargs): + # when is this used? idk + # what is server when it's global? None? idk + self.server = server + self._sid = sid + self.name = name + # this is an id...... + self.author = author + self.url = url + self.main_class = main_class # reference to Audio + self.path = path + + if url is None and "link" in kwargs: + self.url = kwargs.get('link') + self.playlist = playlist + + @property + def filename(self): + f = "data/audio/playlists" + f = os.path.join(f, self.sid, self.name + ".txt") + return f + + def to_json(self): + ret = {"author": self.author, "playlist": self.playlist, + "link": self.url} + return ret + + def is_author(self, user): + """checks if the user is the author of this playlist + Returns True/False""" + return user.id == self.author + + def can_edit(self, user): + """right now checks if user is mod or higher including server owner + global playlists are uneditable atm + + dev notes: + should probably be defined elsewhere later or be dynamic""" + + # I don't know how global playlists are handled. + # Not sure if the framework is there for them to be editable. + # Don't know how they are handled by Playlist + # Don't know how they are handled by Audio + # so let's make sure it's not global at all. + if self.main_class._playlist_exists_global(self.name): + return False + + admin_role = settings.get_server_admin(self.server) + mod_role = settings.get_server_mod(self.server) + + is_playlist_author = self.is_author(user) + is_bot_owner = user.id == settings.owner + is_server_owner = self.server.owner.id == self.author + is_admin = discord.utils.get(user.roles, name=admin_role) is not None + is_mod = discord.utils.get(user.roles, name=mod_role) is not None + + return any((is_playlist_author, + is_bot_owner, + is_server_owner, + is_admin, + is_mod)) + + + # def __del__() ? 
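+    # Note: Playlist objects are plain in-memory containers; persistence only
+    # happens explicitly through save()/to_json(), so no __del__ hook is defined.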
+ + def append_song(self, author, url): + if not self.can_edit(author): + raise UnauthorizedSave + elif not self.main_class._valid_playable_url(url): + raise InvalidURL + else: + self.playlist.append(url) + self.save() + + def save(self): + dataIO.save_json(self.path, self.to_json()) + + @property + def sid(self): + if self._sid: + return self._sid + elif self.server: + return self.server.id + else: + return None + + +class Downloader(threading.Thread): + def __init__(self, url, max_duration=None, download=False, + cache_path="data/audio/cache", *args, **kwargs): + super().__init__(*args, **kwargs) + self.url = url + self.max_duration = max_duration + self.done = threading.Event() + self.song = None + self.failed = False + self._download = download + self.hit_max_length = threading.Event() + self._yt = None + + def run(self): + try: + self.get_info() + if self._download: + self.download() + except MaximumLength: + self.hit_max_length.set() + except: + self.failed = True + self.done.set() + + def download(self): + self.duration_check() + + if not os.path.isfile('data/audio/cache' + self.song.id): + video = self._yt.extract_info(self.url) + self.song = Song(**video) + + def duration_check(self): + log.debug("duration {} for songid {}".format(self.song.duration, + self.song.id)) + if self.max_duration and self.song.duration > self.max_duration: + log.debug("songid {} too long".format(self.song.id)) + raise MaximumLength("songid {} has duration {} > {}".format( + self.song.id, self.song.duration, self.max_duration)) + + def get_info(self): + if self._yt is None: + self._yt = youtube_dl.YoutubeDL(youtube_dl_options) + if "[SEARCH:]" not in self.url: + video = self._yt.extract_info(self.url, download=False, + process=False) + else: + self.url = self.url[9:] + yt_id = self._yt.extract_info( + self.url, download=False)["entries"][0]["id"] + # Should handle errors here ^ + self.url = "https://youtube.com/watch?v={}".format(yt_id) + video = self._yt.extract_info(self.url, download=False, + process=False) + + self.song = Song(**video) + + +class Audio: + """Music Streaming.""" + + def __init__(self, bot, player): + self.bot = bot + self.queue = {} # add deque's, repeat + self.downloaders = {} # sid: object + self.settings = dataIO.load_json("data/audio/settings.json") + self.server_specific_setting_keys = ["VOLUME", "VOTE_ENABLED", + "VOTE_THRESHOLD", "NOPPL_DISCONNECT"] + self.cache_path = "data/audio/cache" + self.local_playlist_path = "data/audio/localtracks" + self._old_game = False + + self.skip_votes = {} + + self.connect_timers = {} + + if player == "ffmpeg": + self.settings["AVCONV"] = False + elif player == "avconv": + self.settings["AVCONV"] = True + self.save_settings() + + async def _add_song_status(self, song): + if self._old_game is False: + self._old_game = list(self.bot.servers)[0].me.game + status = list(self.bot.servers)[0].me.status + game = discord.Game(name=song.title) + await self.bot.change_presence(status=status, game=game) + log.debug('Bot status changed to song title: ' + song.title) + + def _add_to_queue(self, server, url): + if server.id not in self.queue: + self._setup_queue(server) + self.queue[server.id]["QUEUE"].append(url) + + def _add_to_temp_queue(self, server, url): + if server.id not in self.queue: + self._setup_queue(server) + self.queue[server.id]["TEMP_QUEUE"].append(url) + + def _addleft_to_queue(self, server, url): + if server.id not in self.queue: + self._setup_queue() + self.queue[server.id]["QUEUE"].appendleft(url) + + def _cache_desired_files(self): + 
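+        """Shuffled list of song ids currently held by per-server downloaders.
+
+        _dump_cache keeps these files when possible and only removes them
+        if the cache is still over the size limit afterwards."""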
filelist = [] + for server in self.downloaders: + song = self.downloaders[server].song + try: + filelist.append(song.id) + except AttributeError: + pass + shuffle(filelist) + return filelist + + def _cache_max(self): + setting_max = self.settings["MAX_CACHE"] + return max([setting_max, self._cache_min()]) # enforcing hard limit + + def _cache_min(self): + x = self._server_count() + return max([60, 48 * math.log(x) * x**0.3]) # log is not log10 + + def _cache_required_files(self): + queue = copy.deepcopy(self.queue) + filelist = [] + for server in queue: + now_playing = queue[server].get("NOW_PLAYING") + try: + filelist.append(now_playing.id) + except AttributeError: + pass + return filelist + + def _cache_size(self): + songs = os.listdir(self.cache_path) + size = sum(map(lambda s: os.path.getsize( + os.path.join(self.cache_path, s)) / 10**6, songs)) + return size + + def _cache_too_large(self): + if self._cache_size() > self._cache_max(): + return True + return False + + def _clear_queue(self, server): + if server.id not in self.queue: + return + self.queue[server.id]["QUEUE"] = deque() + self.queue[server.id]["TEMP_QUEUE"] = deque() + + async def _create_ffmpeg_player(self, server, filename, local=False): + """This function will guarantee we have a valid voice client, + even if one doesn't exist previously.""" + voice_channel_id = self.queue[server.id]["VOICE_CHANNEL_ID"] + voice_client = self.voice_client(server) + + if voice_client is None: + log.debug("not connected when we should be in sid {}".format( + server.id)) + to_connect = self.bot.get_channel(voice_channel_id) + if to_connect is None: + raise VoiceNotConnected("Okay somehow we're not connected and" + " we have no valid channel to" + " reconnect to. In other words...LOL" + " REKT.") + log.debug("valid reconnect channel for sid" + " {}, reconnecting...".format(server.id)) + await self._join_voice_channel(to_connect) # SHIT + elif voice_client.channel.id != voice_channel_id: + # This was decided at 3:45 EST in #advanced-testing by 26 + self.queue[server.id]["VOICE_CHANNEL_ID"] = voice_client.channel.id + log.debug("reconnect chan id for sid {} is wrong, fixing".format( + server.id)) + + # Okay if we reach here we definitively have a working voice_client + + if local: + song_filename = os.path.join(self.local_playlist_path, filename) + else: + song_filename = os.path.join(self.cache_path, filename) + + use_avconv = self.settings["AVCONV"] + options = '-b:a 64k -bufsize 64k' + + try: + voice_client.audio_player.process.kill() + log.debug("killed old player") + except AttributeError: + pass + except ProcessLookupError: + pass + + log.debug("making player on sid {}".format(server.id)) + + voice_client.audio_player = voice_client.create_ffmpeg_player( + song_filename, use_avconv=use_avconv, options=options) + + # Set initial volume + vol = self.get_server_settings(server)['VOLUME'] / 100 + voice_client.audio_player.volume = vol + + return voice_client # Just for ease of use, it's modified in-place + + # TODO: _current_playlist + + # TODO: _current_song + + def _delete_playlist(self, server, name): + if not name.endswith('.txt'): + name = name + ".txt" + try: + os.remove(os.path.join('data/audio/playlists', server.id, name)) + except OSError: + pass + except WindowsError: + pass + + # TODO: _disable_controls() + + async def _disconnect_voice_client(self, server): + if not self.voice_connected(server): + return + + voice_client = self.voice_client(server) + + await voice_client.disconnect() + + async def _download_all(self, url_list): + 
""" + Doesn't actually download, just get's info for uses like queue_list + """ + downloaders = [] + for url in url_list: + d = Downloader(url) + d.start() + downloaders.append(d) + + while any([d.is_alive() for d in downloaders]): + await asyncio.sleep(0.1) + + songs = [d.song for d in downloaders if d.song is not None] + return songs + + async def _download_next(self, server, curr_dl, next_dl): + """Checks to see if we need to download the next, and does. + + Both curr_dl and next_dl should already be started.""" + if curr_dl.song is None: + # Only happens when the downloader thread hasn't initialized fully + # There's no reason to wait if we can't compare + return + + max_length = self.settings["MAX_LENGTH"] + + while next_dl.is_alive(): + await asyncio.sleep(0.5) + + if curr_dl.song.id != next_dl.song.id: + log.debug("downloader ID's mismatch on sid {}".format(server.id) + + " gonna start dl-ing the next thing on the queue" + " id {}".format(next_dl.song.id)) + try: + next_dl.duration_check() + except MaximumLength: + return + self.downloaders[server.id] = Downloader(next_dl.url, max_length, + download=True) + self.downloaders[server.id].start() + + def _dump_cache(self, ignore_desired=False): + reqd = self._cache_required_files() + log.debug("required cache files:\n\t{}".format(reqd)) + + opt = self._cache_desired_files() + log.debug("desired cache files:\n\t{}".format(opt)) + + prev_size = self._cache_size() + + for file in os.listdir(self.cache_path): + if file not in reqd: + if ignore_desired or file not in opt: + try: + os.remove(os.path.join(self.cache_path, file)) + except OSError: + # A directory got in the cache? + pass + except WindowsError: + # Removing a file in use, reqd failed + pass + + post_size = self._cache_size() + dumped = prev_size - post_size + + if not ignore_desired and self._cache_too_large(): + log.debug("must dump desired files") + return dumped + self._dump_cache(ignore_desired=True) + + log.debug("dumped {} MB of audio files".format(dumped)) + + return dumped + + # TODO: _enable_controls() + + # returns list of active voice channels + # assuming list does not change during the execution of this function + # if that happens, blame asyncio. 
+ def _get_active_voice_clients(self): + avcs = [] + for vc in self.bot.voice_clients: + if hasattr(vc, 'audio_player') and not vc.audio_player.is_done(): + avcs.append(vc) + return avcs + + def _get_queue(self, server, limit): + if server.id not in self.queue: + return [] + + ret = [] + for i in range(limit): + try: + ret.append(self.queue[server.id]["QUEUE"][i]) + except IndexError: + pass + + return ret + + def _get_queue_nowplaying(self, server): + if server.id not in self.queue: + return None + + return self.queue[server.id]["NOW_PLAYING"] + + def _get_queue_playlist(self, server): + if server.id not in self.queue: + return None + + return self.queue[server.id]["PLAYLIST"] + + def _get_queue_repeat(self, server): + if server.id not in self.queue: + return None + + return self.queue[server.id]["REPEAT"] + + def _get_queue_tempqueue(self, server, limit): + if server.id not in self.queue: + return [] + + ret = [] + for i in range(limit): + try: + ret.append(self.queue[server.id]["TEMP_QUEUE"][i]) + except IndexError: + pass + return ret + + async def _guarantee_downloaded(self, server, url): + max_length = self.settings["MAX_LENGTH"] + if server.id not in self.downloaders: # We don't have a downloader + log.debug("sid {} not in downloaders, making one".format( + server.id)) + self.downloaders[server.id] = Downloader(url, max_length) + + if self.downloaders[server.id].url != url: # Our downloader is old + # I'm praying to Jeezus that we don't accidentally lose a running + # Downloader + log.debug("sid {} in downloaders but wrong url".format(server.id)) + self.downloaders[server.id] = Downloader(url, max_length) + + try: + # We're assuming we have the right thing in our downloader object + self.downloaders[server.id].start() + log.debug("starting our downloader for sid {}".format(server.id)) + except RuntimeError: + # Queue manager already started it for us, isn't that nice? 
+ pass + + # Getting info w/o download + self.downloaders[server.id].done.wait() + + # This will throw a maxlength exception if required + self.downloaders[server.id].duration_check() + song = self.downloaders[server.id].song + + log.debug("sid {} wants to play songid {}".format(server.id, song.id)) + + # Now we check to see if we have a cache hit + cache_location = os.path.join(self.cache_path, song.id) + if not os.path.exists(cache_location): + log.debug("cache miss on song id {}".format(song.id)) + self.downloaders[server.id] = Downloader(url, max_length, + download=True) + self.downloaders[server.id].start() + + while self.downloaders[server.id].is_alive(): + await asyncio.sleep(0.5) + + song = self.downloaders[server.id].song + else: + log.debug("cache hit on song id {}".format(song.id)) + + return song + + def _is_queue_playlist(self, server): + if server.id not in self.queue: + return False + + return self.queue[server.id]["PLAYLIST"] + + async def _join_voice_channel(self, channel): + server = channel.server + connect_time = self.connect_timers.get(server.id, 0) + if time.time() < connect_time: + diff = int(connect_time - time.time()) + raise ConnectTimeout("You are on connect cooldown for another {}" + " seconds.".format(diff)) + if server.id in self.queue: + self.queue[server.id]["VOICE_CHANNEL_ID"] = channel.id + try: + await asyncio.wait_for(self.bot.join_voice_channel(channel), + timeout=5, loop=self.bot.loop) + except asyncio.futures.TimeoutError as e: + log.exception(e) + self.connect_timers[server.id] = time.time() + 300 + raise ConnectTimeout("We timed out connecting to a voice channel," + " please try again in 10 minutes.") + + def _list_local_playlists(self): + ret = [] + for thing in os.listdir(self.local_playlist_path): + if os.path.isdir(os.path.join(self.local_playlist_path, thing)): + ret.append(thing) + log.debug("local playlists:\n\t{}".format(ret)) + return ret + + def _list_playlists(self, server): + try: + server = server.id + except: + pass + path = "data/audio/playlists" + old_playlists = [f[:-4] for f in os.listdir(path) + if f.endswith(".txt")] + path = os.path.join(path, server) + if os.path.exists(path): + new_playlists = [f[:-4] for f in os.listdir(path) + if f.endswith(".txt")] + else: + new_playlists = [] + return list(set(old_playlists + new_playlists)) + + def _load_playlist(self, server, name, local=True): + try: + server = server.id + except: + pass + + f = "data/audio/playlists" + if local: + f = os.path.join(f, server, name + ".txt") + else: + f = os.path.join(f, name + ".txt") + kwargs = dataIO.load_json(f) + + kwargs['path'] = f + kwargs['main_class'] = self + kwargs['name'] = name + kwargs['sid'] = server + kwargs['server'] = self.bot.get_server(server) + + return Playlist(**kwargs) + + def _local_playlist_songlist(self, name): + dirpath = os.path.join(self.local_playlist_path, name) + return sorted(os.listdir(dirpath)) + + def _make_local_song(self, filename): + # filename should be playlist_folder/file_name + folder, song = os.path.split(filename) + return Song(name=song, id=filename, title=song, url=filename, + webpage_url=filename) + + def _make_playlist(self, author, url, songlist): + try: + author = author.id + except: + pass + + return Playlist(author=author, url=url, playlist=songlist) + + def _match_sc_playlist(self, url): + return self._match_sc_url(url) + + def _match_yt_playlist(self, url): + if not self._match_yt_url(url): + return False + yt_playlist = re.compile( + r'^(https?\:\/\/)?(www\.)?(youtube\.com|youtu\.?be)' + 
r'((\/playlist\?)|\/watch\?).*(list=)(.*)(&|$)') + # Group 6 should be the list ID + if yt_playlist.match(url): + return True + return False + + def _match_sc_url(self, url): + sc_url = re.compile( + r'^(https?\:\/\/)?(www\.)?(soundcloud\.com\/)') + if sc_url.match(url): + return True + return False + + def _match_yt_url(self, url): + yt_link = re.compile( + r'^(https?\:\/\/)?(www\.|m\.)?(youtube\.com|youtu\.?be)\/.+$') + if yt_link.match(url): + return True + return False + + def _match_any_url(self, url): + url = urlparse(url) + if url.scheme and url.netloc and url.path: + return True + return False + + # TODO: _next_songs_in_queue + + async def _parse_playlist(self, url): + if self._match_sc_playlist(url): + return await self._parse_sc_playlist(url) + elif self._match_yt_playlist(url): + return await self._parse_yt_playlist(url) + raise InvalidPlaylist("The given URL is neither a Soundcloud or" + " YouTube playlist.") + + async def _parse_sc_playlist(self, url): + playlist = [] + d = Downloader(url) + d.start() + + while d.is_alive(): + await asyncio.sleep(0.5) + + for entry in d.song.entries: + if entry["url"][4] != "s": + song_url = "https{}".format(entry["url"][4:]) + playlist.append(song_url) + else: + playlist.append(entry.url) + + return playlist + + async def _parse_yt_playlist(self, url): + d = Downloader(url) + d.start() + playlist = [] + + while d.is_alive(): + await asyncio.sleep(0.5) + + for entry in d.song.entries: + try: + song_url = "https://www.youtube.com/watch?v={}".format( + entry['id']) + playlist.append(song_url) + except AttributeError: + pass + except TypeError: + pass + + log.debug("song list:\n\t{}".format(playlist)) + + return playlist + + async def _play(self, sid, url): + """Returns the song object of what's playing""" + if type(sid) is not discord.Server: + server = self.bot.get_server(sid) + else: + server = sid + + assert type(server) is discord.Server + log.debug('starting to play on "{}"'.format(server.name)) + + if self._valid_playable_url(url) or "[SEARCH:]" in url: + try: + song = await self._guarantee_downloaded(server, url) + except MaximumLength: + log.warning("I can't play URL below because it is too long." 
+ " Use [p]audioset maxlength to change this.\n\n" + "{}".format(url)) + raise + local = False + else: # Assume local + try: + song = self._make_local_song(url) + local = True + except FileNotFoundError: + raise + + voice_client = await self._create_ffmpeg_player(server, song.id, + local=local) + # That ^ creates the audio_player property + + voice_client.audio_player.start() + log.debug("starting player on sid {}".format(server.id)) + + return song + + def _play_playlist(self, server, playlist): + try: + songlist = playlist.playlist + name = playlist.name + except AttributeError: + songlist = playlist + name = True + + log.debug("setting up playlist {} on sid {}".format(name, server.id)) + + self._stop_player(server) + self._stop_downloader(server) + self._clear_queue(server) + + log.debug("finished resetting state on sid {}".format(server.id)) + + self._setup_queue(server) + self._set_queue_playlist(server, name) + self._set_queue_repeat(server, True) + self._set_queue(server, songlist) + + def _play_local_playlist(self, server, name): + songlist = self._local_playlist_songlist(name) + + ret = [] + for song in songlist: + ret.append(os.path.join(name, song)) + + ret_playlist = Playlist(server=server, name=name, playlist=ret) + self._play_playlist(server, ret_playlist) + + def _player_count(self): + count = 0 + queue = copy.deepcopy(self.queue) + for sid in queue: + server = self.bot.get_server(sid) + try: + vc = self.voice_client(server) + if vc.audio_player.is_playing(): + count += 1 + except: + pass + return count + + def _playlist_exists(self, server, name): + return self._playlist_exists_local(server, name) or \ + self._playlist_exists_global(name) + + def _playlist_exists_global(self, name): + f = "data/audio/playlists" + f = os.path.join(f, name + ".txt") + log.debug('checking for {}'.format(f)) + + return dataIO.is_valid_json(f) + + def _playlist_exists_local(self, server, name): + try: + server = server.id + except AttributeError: + pass + + f = "data/audio/playlists" + f = os.path.join(f, server, name + ".txt") + log.debug('checking for {}'.format(f)) + + return dataIO.is_valid_json(f) + + def _remove_queue(self, server): + if server.id in self.queue: + del self.queue[server.id] + + async def _remove_song_status(self): + if self._old_game is not False: + status = list(self.bot.servers)[0].me.status + await self.bot.change_presence(game=self._old_game, + status=status) + log.debug('Bot status returned to ' + str(self._old_game)) + self._old_game = False + + def _save_playlist(self, server, name, playlist): + sid = server.id + try: + f = playlist.filename + playlist = playlist.to_json() + log.debug("got playlist object") + except AttributeError: + f = os.path.join("data/audio/playlists", sid, name + ".txt") + + head, _ = os.path.split(f) + if not os.path.exists(head): + os.makedirs(head) + + log.debug("saving playlist '{}' to {}:\n\t{}".format(name, f, + playlist)) + dataIO.save_json(f, playlist) + + def _shuffle_queue(self, server): + shuffle(self.queue[server.id]["QUEUE"]) + + def _shuffle_temp_queue(self, server): + shuffle(self.queue[server.id]["TEMP_QUEUE"]) + + def _server_count(self): + return max([1, len(self.bot.servers)]) + + def _set_queue(self, server, songlist): + if server.id in self.queue: + self._clear_queue(server) + else: + self._setup_queue(server) + self.queue[server.id]["QUEUE"].extend(songlist) + + def _set_queue_channel(self, server, channel): + if server.id not in self.queue: + return + + try: + channel = channel.id + except AttributeError: + pass + + 
self.queue[server.id]["VOICE_CHANNEL_ID"] = channel + + def _set_queue_nowplaying(self, server, song): + if server.id not in self.queue: + return + + self.queue[server.id]["NOW_PLAYING"] = song + + def _set_queue_playlist(self, server, name=True): + if server.id not in self.queue: + self._setup_queue(server) + + self.queue[server.id]["PLAYLIST"] = name + + def _set_queue_repeat(self, server, value): + if server.id not in self.queue: + self._setup_queue(server) + + self.queue[server.id]["REPEAT"] = value + + def _setup_queue(self, server): + self.queue[server.id] = {"REPEAT": False, "PLAYLIST": False, + "VOICE_CHANNEL_ID": None, + "QUEUE": deque(), "TEMP_QUEUE": deque(), + "NOW_PLAYING": None} + + def _stop(self, server): + self._setup_queue(server) + self._stop_player(server) + self._stop_downloader(server) + self.bot.loop.create_task(self._update_bot_status()) + + async def _stop_and_disconnect(self, server): + self._stop(server) + await self._disconnect_voice_client(server) + + def _stop_downloader(self, server): + if server.id not in self.downloaders: + return + + del self.downloaders[server.id] + + def _stop_player(self, server): + if not self.voice_connected(server): + return + + voice_client = self.voice_client(server) + + if hasattr(voice_client, 'audio_player'): + voice_client.audio_player.stop() + + # no return. they can check themselves. + async def _update_bot_status(self): + if self.settings["TITLE_STATUS"]: + song = None + try: + active_servers = self._get_active_voice_clients() + except: + log.debug("Voice client changed while trying to update bot's" + " song status") + return + if len(active_servers) == 1: + server = active_servers[0].server + song = self.queue[server.id]["NOW_PLAYING"] + if song: + await self._add_song_status(song) + else: + await self._remove_song_status() + + def _valid_playlist_name(self, name): + for char in name: + if char.isdigit() or char.isalpha() or char == "_": + pass + else: + return False + return True + + def _valid_playable_url(self, url): + yt = self._match_yt_url(url) + sc = self._match_sc_url(url) + if yt or sc: # TODO: Add sc check + return True + return False + + @commands.group(pass_context=True) + async def audioset(self, ctx): + """Audio settings.""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + return + + @audioset.command(name="cachemax") + @checks.is_owner() + async def audioset_cachemax(self, size: int): + """Set the max cache size in MB""" + if size < self._cache_min(): + await self.bot.say("Sorry, but because of the number of servers" + " that your bot is in I cannot safely allow" + " you to have less than {} MB of cache.".format( + self._cache_min())) + return + + self.settings["MAX_CACHE"] = size + await self.bot.say("Max cache size set to {} MB.".format(size)) + self.save_settings() + + @audioset.command(name="emptydisconnect", pass_context=True) + @checks.mod_or_permissions(manage_messages=True) + async def audioset_emptydisconnect(self, ctx): + """Toggles auto disconnection when everyone leaves the channel""" + server = ctx.message.server + settings = self.get_server_settings(server.id) + noppl_disconnect = settings.get("NOPPL_DISCONNECT", True) + self.set_server_setting(server, "NOPPL_DISCONNECT", + not noppl_disconnect) + if not noppl_disconnect: + await self.bot.say("If there is no one left in the voice channel" + " the bot will automatically disconnect after" + " five minutes.") + else: + await self.bot.say("The bot will no longer auto disconnect" + " if the voice channel is empty.") + 
self.save_settings() + + @audioset.command(name="maxlength") + @checks.is_owner() + async def audioset_maxlength(self, length: int): + """Maximum track length (seconds) for requested links""" + if length <= 0: + await self.bot.say("Wow, a non-positive length value...aren't" + " you smart.") + return + self.settings["MAX_LENGTH"] = length + await self.bot.say("Maximum length is now {} seconds.".format(length)) + self.save_settings() + + @audioset.command(name="player") + @checks.is_owner() + async def audioset_player(self): + """Toggles between Ffmpeg and Avconv""" + self.settings["AVCONV"] = not self.settings["AVCONV"] + if self.settings["AVCONV"]: + await self.bot.say("Player toggled. You're now using avconv.") + else: + await self.bot.say("Player toggled. You're now using ffmpeg.") + self.save_settings() + + @audioset.command(name="status") + @checks.is_owner() # cause effect is cross-server + async def audioset_status(self): + """Enables/disables songs' titles as status""" + self.settings["TITLE_STATUS"] = not self.settings["TITLE_STATUS"] + if self.settings["TITLE_STATUS"]: + await self.bot.say("If only one server is playing music, songs'" + " titles will now show up as status") + # not updating on disable if we say disable + # means don't mess with it. + await self._update_bot_status() + else: + await self.bot.say("Songs' titles will no longer show up as" + " status") + self.save_settings() + + @audioset.command(pass_context=True, name="volume", no_pm=True) + @checks.mod_or_permissions(manage_messages=True) + async def audioset_volume(self, ctx, percent: int=None): + """Sets the volume (0 - 100) + Note: volume may be set up to 200 but you may experience clipping.""" + server = ctx.message.server + if percent is None: + vol = self.get_server_settings(server)['VOLUME'] + msg = "Volume is currently set to %d%%" % vol + elif percent >= 0 and percent <= 200: + self.set_server_setting(server, "VOLUME", percent) + msg = "Volume is now set to %d." % percent + if percent > 100: + msg += ("\nWarning: volume levels above 100 may result in" + " clipping") + + # Set volume of playing audio + vc = self.voice_client(server) + if vc: + vc.audio_player.volume = percent / 100 + + self.save_settings() + else: + msg = "Volume must be between 0 and 100." + await self.bot.say(msg) + + @audioset.command(pass_context=True, name="vote", no_pm=True) + @checks.mod_or_permissions(manage_messages=True) + async def audioset_vote(self, ctx, percent: int): + """Percentage needed for the masses to skip songs. 0 to disable.""" + server = ctx.message.server + + if percent < 0: + await self.bot.say("Can't be less than zero.") + return + elif percent > 100: + percent = 100 + + if percent == 0: + enabled = False + await self.bot.say("Voting disabled. 
All users can stop or skip.") + else: + enabled = True + await self.bot.say("Vote percentage set to {}%".format(percent)) + + self.set_server_setting(server, "VOTE_THRESHOLD", percent) + self.set_server_setting(server, "VOTE_ENABLED", enabled) + self.save_settings() + + @commands.group(pass_context=True) + async def audiostat(self, ctx): + """General stats on audio stuff.""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + return + + @audiostat.command(name="servers") + async def audiostat_servers(self): + """Number of servers currently playing.""" + + count = self._player_count() + + await self.bot.say("Currently playing music in {} servers.".format( + count)) + + @commands.group(pass_context=True) + async def cache(self, ctx): + """Cache management tools.""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + return + + @cache.command(name="dump") + @checks.is_owner() + async def cache_dump(self): + """Dumps the cache.""" + dumped = self._dump_cache() + await self.bot.say("Dumped {:.3f} MB of audio files.".format(dumped)) + + @cache.command(name="minimum") + async def cache_minimum(self): + """Current minimum cache size, based on server count.""" + await self.bot.say("The cache will be at least {:.3f} MB".format( + self._cache_min())) + + @cache.command(name="size") + async def cache_size(self): + """Current size of the cache.""" + await self.bot.say("Cache is currently at {:.3f} MB.".format( + self._cache_size())) + + @commands.group(pass_context=True, hidden=True, no_pm=True) + @checks.is_owner() + async def disconnect(self, ctx): + """Disconnects from voice channel in current server.""" + if ctx.invoked_subcommand is None: + server = ctx.message.server + await self._stop_and_disconnect(server) + + @disconnect.command(name="all", hidden=True, no_pm=True) + async def disconnect_all(self): + """Disconnects from all voice channels.""" + while len(list(self.bot.voice_clients)) != 0: + vc = list(self.bot.voice_clients)[0] + await self._stop_and_disconnect(vc.server) + await self.bot.say("done.") + + @commands.command(hidden=True, pass_context=True, no_pm=True) + @checks.is_owner() + async def joinvoice(self, ctx): + """Joins your voice channel""" + author = ctx.message.author + server = ctx.message.server + voice_channel = author.voice_channel + + if voice_channel is not None: + self._stop(server) + + await self._join_voice_channel(voice_channel) + + @commands.group(pass_context=True, no_pm=True) + async def local(self, ctx): + """Local playlists commands""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + + @local.command(name="start", pass_context=True, no_pm=True) + async def play_local(self, ctx, *, name): + """Plays a local playlist""" + server = ctx.message.server + author = ctx.message.author + voice_channel = author.voice_channel + + # Checking already connected, will join if not + + if not self.voice_connected(server): + try: + self.has_connect_perm(author, server) + except AuthorNotConnected: + await self.bot.say("You must join a voice channel before I can" + " play anything.") + return + except UnauthorizedConnect: + await self.bot.say("I don't have permissions to join your" + " voice channel.") + return + except UnauthorizedSpeak: + await self.bot.say("I don't have permissions to speak in your" + " voice channel.") + return + except ChannelUserLimit: + await self.bot.say("Your voice channel is full.") + return + else: + await self._join_voice_channel(voice_channel) + else: # We are connected but not to the right channel + if 
self.voice_client(server).channel != voice_channel: + pass # TODO: Perms + + # Checking if playing in current server + + if self.is_playing(server): + await self.bot.say("I'm already playing a song on this server!") + return # TODO: Possibly execute queue? + + # If not playing, spawn a downloader if it doesn't exist and begin + # downloading the next song + + if self.currently_downloading(server): + await self.bot.say("I'm already downloading a file!") + return + + lists = self._list_local_playlists() + + if not any(map(lambda l: os.path.split(l)[1] == name, lists)): + await self.bot.say("Local playlist not found.") + return + + self._play_local_playlist(server, name) + + @local.command(name="list", no_pm=True) + async def list_local(self): + """Lists local playlists""" + playlists = ", ".join(self._list_local_playlists()) + if playlists: + playlists = "Available local playlists:\n\n" + playlists + for page in pagify(playlists, delims=[" "]): + await self.bot.say(page) + else: + await self.bot.say("There are no playlists.") + + @commands.command(pass_context=True, no_pm=True) + async def pause(self, ctx): + """Pauses the current song, `[p]resume` to continue.""" + server = ctx.message.server + if not self.voice_connected(server): + await self.bot.say("Not voice connected in this server.") + return + + # We are connected somewhere + voice_client = self.voice_client(server) + + if not hasattr(voice_client, 'audio_player'): + await self.bot.say("Nothing playing, nothing to pause.") + elif voice_client.audio_player.is_playing(): + voice_client.audio_player.pause() + await self.bot.say("Paused.") + else: + await self.bot.say("Nothing playing, nothing to pause.") + + @commands.command(pass_context=True, no_pm=True) + async def play(self, ctx, *, url_or_search_terms): + """Plays a link / searches and play""" + url = url_or_search_terms + server = ctx.message.server + author = ctx.message.author + voice_channel = author.voice_channel + + # Checking if playing in current server + + if self.is_playing(server): + await ctx.invoke(self._queue, url=url) + return # Default to queue + + # Checking already connected, will join if not + + try: + self.has_connect_perm(author, server) + except AuthorNotConnected: + await self.bot.say("You must join a voice channel before I can" + " play anything.") + return + except UnauthorizedConnect: + await self.bot.say("I don't have permissions to join your" + " voice channel.") + return + except UnauthorizedSpeak: + await self.bot.say("I don't have permissions to speak in your" + " voice channel.") + return + except ChannelUserLimit: + await self.bot.say("Your voice channel is full.") + return + + if not self.voice_connected(server): + await self._join_voice_channel(voice_channel) + else: # We are connected but not to the right channel + if self.voice_client(server).channel != voice_channel: + await self._stop_and_disconnect(server) + await self._join_voice_channel(voice_channel) + + # If not playing, spawn a downloader if it doesn't exist and begin + # downloading the next song + + if self.currently_downloading(server): + await self.bot.say("I'm already downloading a file!") + return + + url = url.strip("<>") + + if self._match_any_url(url): + if not self._valid_playable_url(url): + await self.bot.say("That's not a valid URL.") + return + else: + url = url.replace("/", "/") + url = "[SEARCH:]" + url + + if "[SEARCH:]" not in url and "youtube" in url: + url = url.split("&")[0] # Temp fix for the &list issue + + self._stop_player(server) + self._clear_queue(server) + 
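+        # Queueing is enough here; the background queue_scheduler task picks
+        # the URL up, downloads it and starts playback.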
self._add_to_queue(server, url) + + @commands.command(pass_context=True, no_pm=True) + async def prev(self, ctx): + """Goes back to the last song.""" + # Current song is in NOW_PLAYING + server = ctx.message.server + + if self.is_playing(server): + curr_url = self._get_queue_nowplaying(server).webpage_url + last_url = None + if self._is_queue_playlist(server): + # need to reorder queue + try: + last_url = self.queue[server.id]["QUEUE"].pop() + except IndexError: + pass + + log.debug("prev on sid {}, curr_url {}".format(server.id, + curr_url)) + + self._addleft_to_queue(server, curr_url) + if last_url: + self._addleft_to_queue(server, last_url) + self._set_queue_nowplaying(server, None) + + self.voice_client(server).audio_player.stop() + + await self.bot.say("Going back 1 song.") + else: + await self.bot.say("Not playing anything on this server.") + + @commands.group(pass_context=True, no_pm=True) + async def playlist(self, ctx): + """Playlist management/control.""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + + @playlist.command(pass_context=True, no_pm=True, name="create") + async def playlist_create(self, ctx, name): + """Creates an empty playlist""" + server = ctx.message.server + author = ctx.message.author + if not self._valid_playlist_name(name) or len(name) > 25: + await self.bot.say("That playlist name is invalid. It must only" + " contain alpha-numeric characters or _.") + return + + # Returns a Playlist object + url = None + songlist = [] + playlist = self._make_playlist(author, url, songlist) + + playlist.name = name + playlist.server = server + + self._save_playlist(server, name, playlist) + await self.bot.say("Empty playlist '{}' saved.".format(name)) + + @playlist.command(pass_context=True, no_pm=True, name="add") + async def playlist_add(self, ctx, name, url): + """Add a YouTube or Soundcloud playlist.""" + server = ctx.message.server + author = ctx.message.author + if not self._valid_playlist_name(name) or len(name) > 25: + await self.bot.say("That playlist name is invalid. It must only" + " contain alpha-numeric characters or _.") + return + + if self._valid_playable_url(url): + try: + await self.bot.say("Enumerating song list... This could take" + " a few moments.") + songlist = await self._parse_playlist(url) + except InvalidPlaylist: + await self.bot.say("That playlist URL is invalid.") + return + + playlist = self._make_playlist(author, url, songlist) + # Returns a Playlist object + + playlist.name = name + playlist.server = server + + self._save_playlist(server, name, playlist) + await self.bot.say("Playlist '{}' saved. Tracks: {}".format( + name, len(songlist))) + else: + await self.bot.say("That URL is not a valid Soundcloud or YouTube" + " playlist link. 
If you think this is in error" + " please let us know and we'll get it" + " fixed ASAP.") + + @playlist.command(pass_context=True, no_pm=True, name="append") + async def playlist_append(self, ctx, name, url): + """Appends to a playlist.""" + author = ctx.message.author + server = ctx.message.server + if name not in self._list_playlists(server): + await self.bot.say("There is no playlist with that name.") + return + playlist = self._load_playlist( + server, name, local=self._playlist_exists_local(server, name)) + try: + playlist.append_song(author, url) + except UnauthorizedSave: + await self.bot.say("You're not the author of that playlist.") + except InvalidURL: + await self.bot.say("Invalid link.") + else: + await self.bot.say("Done.") + + @playlist.command(pass_context=True, no_pm=True, name="extend") + async def playlist_extend(self, ctx, playlist_url_or_name): + """Extends a playlist with a playlist link""" + # Need better wording ^ + await self.bot.say("Not implemented yet.") + + @playlist.command(pass_context=True, no_pm=True, name="list") + async def playlist_list(self, ctx): + """Lists all available playlists""" + server = ctx.message.server + playlists = ", ".join(self._list_playlists(server)) + if playlists: + playlists = "Available playlists:\n\n" + playlists + for page in pagify(playlists, delims=[" "]): + await self.bot.say(page) + else: + await self.bot.say("There are no playlists.") + + @playlist.command(pass_context=True, no_pm=True, name="queue") + async def playlist_queue(self, ctx, url): + """Adds a song to the playlist loop. + + Does NOT write to disk.""" + server = ctx.message.server + if not self.voice_connected(server): + await self.bot.say("Not voice connected in this server.") + return + + # We are connected somewhere + if server.id not in self.queue: + log.debug("Something went wrong, we're connected but have no" + " queue entry.") + raise VoiceNotConnected("Something went wrong, we have no internal" + " queue to modify. 
This should never" + " happen.") + + # We have a queue to modify + self._add_to_queue(server, url) + + await self.bot.say("Queued.") + + @playlist.command(pass_context=True, no_pm=True, name="remove") + async def playlist_remove(self, ctx, name): + """Deletes a saved playlist.""" + author = ctx.message.author + server = ctx.message.server + + if not self._valid_playlist_name(name): + await self.bot.say("The playlist's name contains invalid " + "characters.") + return + + if not self._playlist_exists(server, name): + await self.bot.say("Playlist not found.") + return + + playlist = self._load_playlist( + server, name, local=self._playlist_exists_local(server, name)) + + if not playlist.can_edit(author): + await self.bot.say("You do not have permissions to delete that playlist.") + return + + self._delete_playlist(server, name) + await self.bot.say("Playlist deleted.") + + + @playlist.command(pass_context=True, no_pm=True, name="start") + async def playlist_start(self, ctx, name): + """Plays a playlist.""" + server = ctx.message.server + author = ctx.message.author + voice_channel = ctx.message.author.voice_channel + + caller = inspect.currentframe().f_back.f_code.co_name + + if voice_channel is None: + await self.bot.say("You must be in a voice channel to start a" + " playlist.") + return + + if self._playlist_exists(server, name): + if not self.voice_connected(server): + try: + self.has_connect_perm(author, server) + except AuthorNotConnected: + await self.bot.say("You must join a voice channel before" + " I can play anything.") + return + except UnauthorizedConnect: + await self.bot.say("I don't have permissions to join your" + " voice channel.") + return + except UnauthorizedSpeak: + await self.bot.say("I don't have permissions to speak in" + " your voice channel.") + return + except ChannelUserLimit: + await self.bot.say("Your voice channel is full.") + return + else: + await self._join_voice_channel(voice_channel) + self._clear_queue(server) + playlist = self._load_playlist(server, name, + local=self._playlist_exists_local( + server, name)) + if caller == "playlist_start_mix": + shuffle(playlist.playlist) + + self._play_playlist(server, playlist) + await self.bot.say("Playlist queued.") + else: + await self.bot.say("That playlist does not exist.") + + @playlist.command(pass_context=True, no_pm=True, name="mix") + async def playlist_start_mix(self, ctx, name): + """Plays and mixes a playlist.""" + await self.playlist_start.callback(self, ctx, name) + + @commands.command(pass_context=True, no_pm=True, name="queue") + async def _queue(self, ctx, *, url=None): + """Queues a song to play next. Extended functionality in `[p]help` + + If you use `queue` when one song is playing, your new song will get + added to the song loop (if running). If you use `queue` when a + playlist is running, it will temporarily be played next and will + NOT stay in the playlist loop.""" + if url is None: + return await self._queue_list(ctx) + server = ctx.message.server + if not self.voice_connected(server): + await ctx.invoke(self.play, url_or_search_terms=url) + return + + # We are connected somewhere + if server.id not in self.queue: + log.debug("Something went wrong, we're connected but have no" + " queue entry.") + raise VoiceNotConnected("Something went wrong, we have no internal" + " queue to modify. 
This should never" + " happen.") + + url = url.strip("<>") + + if self._match_any_url(url): + if not self._valid_playable_url(url): + await self.bot.say("That's not a valid URL.") + return + else: + url = "[SEARCH:]" + url + + if "[SEARCH:]" not in url and "youtube" in url: + url = url.split("&")[0] # Temp fix for the &list issue + + # We have a queue to modify + if self.queue[server.id]["PLAYLIST"]: + log.debug("queueing to the temp_queue for sid {}".format( + server.id)) + self._add_to_temp_queue(server, url) + else: + log.debug("queueing to the actual queue for sid {}".format( + server.id)) + self._add_to_queue(server, url) + await self.bot.say("Queued.") + + async def _queue_list(self, ctx): + """Not a command, use `queue` with no args to call this.""" + server = ctx.message.server + if server.id not in self.queue: + await self.bot.say("Nothing playing on this server!") + return + elif len(self.queue[server.id]["QUEUE"]) == 0: + await self.bot.say("Nothing queued on this server.") + return + + msg = "" + + now_playing = self._get_queue_nowplaying(server) + + if now_playing is not None: + msg += "\n***Now playing:***\n{}\n".format(now_playing.title) + + queue_url_list = self._get_queue(server, 5) + tempqueue_url_list = self._get_queue_tempqueue(server, 5) + + await self.bot.say("Gathering information...") + + queue_song_list = await self._download_all(queue_url_list) + tempqueue_song_list = await self._download_all(tempqueue_url_list) + + song_info = [] + for num, song in enumerate(tempqueue_song_list, 1): + try: + song_info.append("{}. {.title}".format(num, song)) + except AttributeError: + song_info.append("{}. {.webpage_url}".format(num, song)) + + for num, song in enumerate(queue_song_list, len(song_info) + 1): + if num > 5: + break + try: + song_info.append("{}. {.title}".format(num, song)) + except AttributeError: + song_info.append("{}. {.webpage_url}".format(num, song)) + msg += "\n***Next up:***\n" + "\n".join(song_info) + + await self.bot.say(msg) + + @commands.group(pass_context=True, no_pm=True) + async def repeat(self, ctx): + """Toggles REPEAT""" + server = ctx.message.server + if ctx.invoked_subcommand is None: + if self.is_playing(server): + if self.queue[server.id]["REPEAT"]: + msg = "The queue is currently looping." + else: + msg = "The queue is currently not looping." + await self.bot.say(msg) + await self.bot.say( + "Do `{}repeat toggle` to change this.".format(ctx.prefix)) + else: + await self.bot.say("Play something to see this setting.") + + @repeat.command(pass_context=True, no_pm=True, name="toggle") + async def repeat_toggle(self, ctx): + """Flips repeat setting.""" + server = ctx.message.server + if not self.is_playing(server): + await self.bot.say("I don't have a repeat setting to flip." 
+ " Try playing something first.") + return + + self._set_queue_repeat(server, not self.queue[server.id]["REPEAT"]) + repeat = self.queue[server.id]["REPEAT"] + if repeat: + await self.bot.say("Repeat toggled on.") + else: + await self.bot.say("Repeat toggled off.") + + @commands.command(pass_context=True, no_pm=True) + async def resume(self, ctx): + """Resumes a paused song or playlist""" + server = ctx.message.server + if not self.voice_connected(server): + await self.bot.say("Not voice connected in this server.") + return + + # We are connected somewhere + voice_client = self.voice_client(server) + + if not hasattr(voice_client, 'audio_player'): + await self.bot.say("Nothing paused, nothing to resume.") + elif not voice_client.audio_player.is_done() and \ + not voice_client.audio_player.is_playing(): + voice_client.audio_player.resume() + await self.bot.say("Resuming.") + else: + await self.bot.say("Nothing paused, nothing to resume.") + + @commands.command(pass_context=True, no_pm=True, name="shuffle") + async def _shuffle(self, ctx): + """Shuffles the current queue""" + server = ctx.message.server + if server.id not in self.queue: + await self.bot.say("I have nothing in queue to shuffle.") + return + + self._shuffle_queue(server) + self._shuffle_temp_queue(server) + + await self.bot.say("Queues shuffled.") + + @commands.command(pass_context=True, aliases=["next"], no_pm=True) + async def skip(self, ctx): + """Skips a song, using the set threshold if the requester isn't + a mod or admin. Mods, admins and bot owner are not counted in + the vote threshold.""" + msg = ctx.message + server = ctx.message.server + if self.is_playing(server): + vchan = server.me.voice_channel + vc = self.voice_client(server) + if msg.author.voice_channel == vchan: + if self.can_instaskip(msg.author): + vc.audio_player.stop() + if self._get_queue_repeat(server) is False: + self._set_queue_nowplaying(server, None) + await self.bot.say("Skipping...") + else: + if msg.author.id in self.skip_votes[server.id]: + self.skip_votes[server.id].remove(msg.author.id) + reply = "I removed your vote to skip." + else: + self.skip_votes[server.id].append(msg.author.id) + reply = "you voted to skip." + + num_votes = len(self.skip_votes[server.id]) + # Exclude bots and non-plebs + num_members = sum(not (m.bot or self.can_instaskip(m)) + for m in vchan.voice_members) + vote = int(100 * num_votes / num_members) + thresh = self.get_server_settings(server)["VOTE_THRESHOLD"] + + if vote >= thresh: + vc.audio_player.stop() + if self._get_queue_repeat(server) is False: + self._set_queue_nowplaying(server, None) + self.skip_votes[server.id] = [] + await self.bot.say("Vote threshold met. 
Skipping...") + return + else: + reply += " Votes: %d/%d" % (num_votes, num_members) + reply += " (%d%% out of %d%% needed)" % (vote, thresh) + await self.bot.reply(reply) + else: + await self.bot.say("You need to be in the voice channel to skip the music.") + else: + await self.bot.say("Can't skip if I'm not playing.") + + def can_instaskip(self, member): + server = member.server + + if not self.get_server_settings(server)["VOTE_ENABLED"]: + return True + + admin_role = settings.get_server_admin(server) + mod_role = settings.get_server_mod(server) + + is_owner = member.id == settings.owner + is_server_owner = member == server.owner + is_admin = discord.utils.get(member.roles, name=admin_role) is not None + is_mod = discord.utils.get(member.roles, name=mod_role) is not None + + + nonbots = sum(not m.bot for m in member.voice_channel.voice_members) + alone = nonbots <= 1 + + return is_owner or is_server_owner or is_admin or is_mod or alone + + @commands.command(pass_context=True, no_pm=True) + async def sing(self, ctx): + """Makes Red sing one of her songs""" + ids = ("zGTkAVsrfg8", "cGMWL8cOeAU", "vFrjMq4aL-g", "WROI5WYBU_A", + "41tIUr_ex3g", "f9O2Rjn1azc") + url = "https://www.youtube.com/watch?v={}".format(choice(ids)) + await ctx.invoke(self.play, url_or_search_terms=url) + + @commands.command(pass_context=True, no_pm=True) + async def song(self, ctx): + """Info about the current song.""" + server = ctx.message.server + if not self.is_playing(server): + await self.bot.say("I'm not playing on this server.") + return + + song = self._get_queue_nowplaying(server) + if song: + if not hasattr(song, 'creator'): + song.creator = None + if not hasattr(song, 'view_count'): + song.view_count = None + if not hasattr(song, 'uploader'): + song.uploader = None + if hasattr(song, 'duration'): + m, s = divmod(song.duration, 60) + h, m = divmod(m, 60) + if h: + dur = "{0}:{1:0>2}:{2:0>2}".format(h, m, s) + else: + dur = "{0}:{1:0>2}".format(m, s) + else: + dur = None + msg = ("\n**Title:** {}\n**Author:** {}\n**Uploader:** {}\n" + "**Views:** {}\n**Duration:** {}\n\n<{}>".format( + song.title, song.creator, song.uploader, + song.view_count, dur, song.webpage_url)) + await self.bot.say(msg.replace("**Author:** None\n", "") + .replace("**Views:** None\n", "") + .replace("**Uploader:** None\n", "") + .replace("**Duration:** None\n", "")) + else: + await self.bot.say("Darude - Sandstorm.") + + @commands.command(pass_context=True, no_pm=True) + async def stop(self, ctx): + """Stops a currently playing song or playlist. CLEARS QUEUE.""" + server = ctx.message.server + if self.is_playing(server): + if ctx.message.author.voice_channel == server.me.voice_channel: + if self.can_instaskip(ctx.message.author): + await self.bot.say('Stopping...') + self._stop(server) + else: + await self.bot.say("You can't stop music when there are other" + " people in the channel! 
Vote to skip" + " instead.") + else: + await self.bot.say("You need to be in the voice channel to stop the music.") + else: + await self.bot.say("Can't stop if I'm not playing.") + + @commands.command(name="yt", pass_context=True, no_pm=True) + async def yt_search(self, ctx, *, search_terms: str): + """Searches and plays a video from YouTube""" + await self.bot.say("Searching...") + await ctx.invoke(self.play, url_or_search_terms=search_terms) + + def is_playing(self, server): + if not self.voice_connected(server): + return False + if self.voice_client(server) is None: + return False + if not hasattr(self.voice_client(server), 'audio_player'): + return False + if self.voice_client(server).audio_player.is_done(): + return False + return True + + async def cache_manager(self): + while self == self.bot.get_cog("Audio"): + if self._cache_too_large(): + # Our cache is too big, dumping + log.debug("cache too large ({} > {}), dumping".format( + self._cache_size(), self._cache_max())) + self._dump_cache() + await asyncio.sleep(5) # No need to run this every half second + + async def cache_scheduler(self): + await asyncio.sleep(30) # Extra careful + + self.bot.loop.create_task(self.cache_manager()) + + def currently_downloading(self, server): + if server.id in self.downloaders: + if self.downloaders[server.id].is_alive(): + return True + return False + + async def disconnect_timer(self): + stop_times = {} + while self == self.bot.get_cog('Audio'): + for vc in self.bot.voice_clients: + server = vc.server + if not hasattr(vc, 'audio_player') and \ + (server not in stop_times or + stop_times[server] is None): + log.debug("putting sid {} in stop loop, no player".format( + server.id)) + stop_times[server] = int(time.time()) + + if hasattr(vc, 'audio_player'): + if vc.audio_player.is_done(): + if server not in stop_times or stop_times[server] is None: + log.debug("putting sid {} in stop loop".format(server.id)) + stop_times[server] = int(time.time()) + + noppl_disconnect = self.get_server_settings(server) + noppl_disconnect = noppl_disconnect.get("NOPPL_DISCONNECT", True) + if noppl_disconnect and len(vc.channel.voice_members) == 1: + if server not in stop_times or stop_times[server] is None: + log.debug("putting sid {} in stop loop".format(server.id)) + stop_times[server] = int(time.time()) + elif not vc.audio_player.is_done(): + stop_times[server] = None + + for server in stop_times: + if stop_times[server] and \ + int(time.time()) - stop_times[server] > 300: + # 5 min not playing to d/c + log.debug("dcing from sid {} after 300s".format(server.id)) + self._clear_queue(server) + await self._stop_and_disconnect(server) + stop_times[server] = None + await asyncio.sleep(5) + + def get_server_settings(self, server): + try: + sid = server.id + except: + sid = server + + if sid not in self.settings["SERVERS"]: + self.settings["SERVERS"][sid] = {} + ret = self.settings["SERVERS"][sid] + + # Not the cleanest way. Some refactoring is suggested if more settings + # have to be added + if "NOPPL_DISCONNECT" not in ret: + ret["NOPPL_DISCONNECT"] = True + + for setting in self.server_specific_setting_keys: + if setting not in ret: + # Add the default + ret[setting] = self.settings[setting] + if setting.lower() == "volume" and ret[setting] <= 1: + ret[setting] *= 100 + # ^This will make it so that only users with an outdated config will + # have their volume set * 100. In theory. 
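+        # Persist immediately so any defaults filled in above survive a restart.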
+ self.save_settings() + + return ret + + def has_connect_perm(self, author, server): + channel = author.voice_channel + + if channel: + is_admin = channel.permissions_for(server.me).administrator + if channel.user_limit == 0: + is_full = False + else: + is_full = len(channel.voice_members) >= channel.user_limit + + if channel is None: + raise AuthorNotConnected + elif channel.permissions_for(server.me).connect is False: + raise UnauthorizedConnect + elif channel.permissions_for(server.me).speak is False: + raise UnauthorizedSpeak + elif is_full and not is_admin: + raise ChannelUserLimit + else: + return True + return False + + async def queue_manager(self, sid): + """This function assumes that there's something in the queue for us to + play""" + server = self.bot.get_server(sid) + max_length = self.settings["MAX_LENGTH"] + + # This is a reference, or should be at least + temp_queue = self.queue[server.id]["TEMP_QUEUE"] + queue = self.queue[server.id]["QUEUE"] + repeat = self.queue[server.id]["REPEAT"] + last_song = self.queue[server.id]["NOW_PLAYING"] + + assert temp_queue is self.queue[server.id]["TEMP_QUEUE"] + assert queue is self.queue[server.id]["QUEUE"] + + # _play handles creating the voice_client and player for us + + if not self.is_playing(server): + log.debug("not playing anything on sid {}".format(server.id) + + ", attempting to start a new song.") + self.skip_votes[server.id] = [] + # Reset skip votes for each new song + if len(temp_queue) > 0: + # Fake queue for irdumb's temp playlist songs + log.debug("calling _play because temp_queue is non-empty") + try: + song = await self._play(sid, temp_queue.popleft()) + except MaximumLength: + return + elif len(queue) > 0: # We're in the normal queue + url = queue.popleft() + log.debug("calling _play on the normal queue") + try: + song = await self._play(sid, url) + except MaximumLength: + return + if repeat and last_song: + queue.append(last_song.webpage_url) + else: + song = None + self.queue[server.id]["NOW_PLAYING"] = song + log.debug("set now_playing for sid {}".format(server.id)) + self.bot.loop.create_task(self._update_bot_status()) + + elif server.id in self.downloaders: + # We're playing but we might be able to download a new song + curr_dl = self.downloaders.get(server.id) + if len(temp_queue) > 0: + next_dl = Downloader(temp_queue.peekleft(), + max_length) + elif len(queue) > 0: + next_dl = Downloader(queue.peekleft(), max_length) + else: + next_dl = None + + if next_dl is not None: + # Download next song + next_dl.start() + await self._download_next(server, curr_dl, next_dl) + + async def queue_scheduler(self): + while self == self.bot.get_cog('Audio'): + tasks = [] + queue = copy.deepcopy(self.queue) + for sid in queue: + if len(queue[sid]["QUEUE"]) == 0 and \ + len(queue[sid]["TEMP_QUEUE"]) == 0: + continue + # log.debug("scheduler found a non-empty queue" + # " for sid: {}".format(sid)) + tasks.append( + self.bot.loop.create_task(self.queue_manager(sid))) + completed = [t.done() for t in tasks] + while not all(completed): + completed = [t.done() for t in tasks] + await asyncio.sleep(0.5) + await asyncio.sleep(1) + + async def reload_monitor(self): + while self == self.bot.get_cog('Audio'): + await asyncio.sleep(0.5) + + for vc in self.bot.voice_clients: + try: + vc.audio_player.stop() + except: + pass + + def save_settings(self): + dataIO.save_json('data/audio/settings.json', self.settings) + + def set_server_setting(self, server, key, value): + if server.id not in self.settings["SERVERS"]: + 
self.settings["SERVERS"][server.id] = {} + self.settings["SERVERS"][server.id][key] = value + + def voice_client(self, server): + return self.bot.voice_client_in(server) + + def voice_connected(self, server): + if self.bot.is_voice_connected(server): + return True + return False + + async def voice_state_update(self, before, after): + server = after.server + # Member objects + if after.voice_channel != before.voice_channel: + try: + self.skip_votes[server.id].remove(after.id) + except (ValueError, KeyError): + pass + # Either the server ID or member ID already isn't in there + if after is None: + return + if server.id not in self.queue: + return + if after != server.me: + return + + # Member is the bot + + if before.voice_channel != after.voice_channel: + self._set_queue_channel(after.server, after.voice_channel) + + if before.mute != after.mute: + vc = self.voice_client(server) + if after.mute and vc.audio_player.is_playing(): + log.debug("Just got muted, pausing") + vc.audio_player.pause() + elif not after.mute and \ + (not vc.audio_player.is_playing() and + not vc.audio_player.is_done()): + log.debug("just got unmuted, resuming") + vc.audio_player.resume() + + def __unload(self): + for vc in self.bot.voice_clients: + self.bot.loop.create_task(vc.disconnect()) + + +def check_folders(): + folders = ("data/audio", "data/audio/cache", "data/audio/playlists", + "data/audio/localtracks", "data/audio/sfx") + for folder in folders: + if not os.path.exists(folder): + print("Creating " + folder + " folder...") + os.makedirs(folder) + + +def check_files(): + default = {"VOLUME": 50, "MAX_LENGTH": 3700, "VOTE_ENABLED": True, + "MAX_CACHE": 0, "SOUNDCLOUD_CLIENT_ID": None, + "TITLE_STATUS": True, "AVCONV": False, "VOTE_THRESHOLD": 50, + "SERVERS": {}} + settings_path = "data/audio/settings.json" + + if not os.path.isfile(settings_path): + print("Creating default audio settings.json...") + dataIO.save_json(settings_path, default) + else: # consistency check + try: + current = dataIO.load_json(settings_path) + except JSONDecodeError: + # settings.json keeps getting corrupted for unknown reasons. Let's + # try to keep it from making the cog load fail. + dataIO.save_json(settings_path, default) + current = dataIO.load_json(settings_path) + if current.keys() != default.keys(): + for key in default.keys(): + if key not in current.keys(): + current[key] = default[key] + print( + "Adding " + str(key) + " field to audio settings.json") + dataIO.save_json(settings_path, current) + +def verify_ffmpeg_avconv(): + try: + subprocess.call(["ffmpeg", "-version"], stdout=subprocess.DEVNULL) + except FileNotFoundError: + pass + else: + return "ffmpeg" + + try: + subprocess.call(["avconv", "-version"], stdout=subprocess.DEVNULL) + except FileNotFoundError: + return False + else: + return "avconv" + +def setup(bot): + check_folders() + check_files() + + if youtube_dl is None: + raise RuntimeError("You need to run `pip3 install youtube_dl`") + if opus is False: + raise RuntimeError( + "Your opus library's bitness must match your python installation's" + " bitness. They both must be either 32bit or 64bit.") + elif opus is None: + raise RuntimeError( + "You need to install ffmpeg and opus. 
See \"https://github.com/" + "Twentysix26/Red-DiscordBot/wiki/Requirements\"") + + player = verify_ffmpeg_avconv() + + if not player: + if os.name == "nt": + msg = "ffmpeg isn't installed" + else: + msg = "Neither ffmpeg nor avconv are installed" + raise RuntimeError( + "{}.\nConsult the guide for your operating system " + "and do ALL the steps in order.\n" + "https://twentysix26.github.io/Red-Docs/\n" + "".format(msg)) + + n = Audio(bot, player=player) # Praise 26 + bot.add_cog(n) + bot.add_listener(n.voice_state_update, 'on_voice_state_update') + bot.loop.create_task(n.queue_scheduler()) + bot.loop.create_task(n.disconnect_timer()) + bot.loop.create_task(n.reload_monitor()) + bot.loop.create_task(n.cache_scheduler()) diff --git a/RBXLegacyDiscordBot/cogs/customcom.py b/RBXLegacyDiscordBot/cogs/customcom.py new file mode 100644 index 0000000..ba6753b --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/customcom.py @@ -0,0 +1,200 @@ +from discord.ext import commands +from .utils.dataIO import dataIO +from .utils import checks +from .utils.chat_formatting import pagify, box +import os +import re + + +class CustomCommands: + """Custom commands + + Creates commands used to display text""" + + def __init__(self, bot): + self.bot = bot + self.file_path = "data/customcom/commands.json" + self.c_commands = dataIO.load_json(self.file_path) + + @commands.group(aliases=["cc"], pass_context=True, no_pm=True) + async def customcom(self, ctx): + """Custom commands management""" + if ctx.invoked_subcommand is None: + await self.bot.send_cmd_help(ctx) + + @customcom.command(name="add", pass_context=True) + @checks.mod_or_permissions(administrator=True) + async def cc_add(self, ctx, command : str, *, text): + """Adds a custom command + + Example: + [p]customcom add yourcommand Text you want + + CCs can be enhanced with arguments: + https://twentysix26.github.io/Red-Docs/red_guide_command_args/ + """ + server = ctx.message.server + command = command.lower() + if command in self.bot.commands: + await self.bot.say("That command is already a standard command.") + return + if server.id not in self.c_commands: + self.c_commands[server.id] = {} + cmdlist = self.c_commands[server.id] + if command not in cmdlist: + cmdlist[command] = text + self.c_commands[server.id] = cmdlist + dataIO.save_json(self.file_path, self.c_commands) + await self.bot.say("Custom command successfully added.") + else: + await self.bot.say("This command already exists. Use " + "`{}customcom edit` to edit it." + "".format(ctx.prefix)) + + @customcom.command(name="edit", pass_context=True) + @checks.mod_or_permissions(administrator=True) + async def cc_edit(self, ctx, command : str, *, text): + """Edits a custom command + + Example: + [p]customcom edit yourcommand Text you want + """ + server = ctx.message.server + command = command.lower() + if server.id in self.c_commands: + cmdlist = self.c_commands[server.id] + if command in cmdlist: + cmdlist[command] = text + self.c_commands[server.id] = cmdlist + dataIO.save_json(self.file_path, self.c_commands) + await self.bot.say("Custom command successfully edited.") + else: + await self.bot.say("That command doesn't exist. Use " + "`{}customcom add` to add it." + "".format(ctx.prefix)) + else: + await self.bot.say("There are no custom commands in this server." + " Use `{}customcom add` to start adding some." 
+ "".format(ctx.prefix)) + + @customcom.command(name="delete", pass_context=True) + @checks.mod_or_permissions(administrator=True) + async def cc_delete(self, ctx, command : str): + """Deletes a custom command + + Example: + [p]customcom delete yourcommand""" + server = ctx.message.server + command = command.lower() + if server.id in self.c_commands: + cmdlist = self.c_commands[server.id] + if command in cmdlist: + cmdlist.pop(command, None) + self.c_commands[server.id] = cmdlist + dataIO.save_json(self.file_path, self.c_commands) + await self.bot.say("Custom command successfully deleted.") + else: + await self.bot.say("That command doesn't exist.") + else: + await self.bot.say("There are no custom commands in this server." + " Use `{}customcom add` to start adding some." + "".format(ctx.prefix)) + + @customcom.command(name="list", pass_context=True) + async def cc_list(self, ctx): + """Shows custom commands list""" + server = ctx.message.server + commands = self.c_commands.get(server.id, {}) + + if not commands: + await self.bot.say("There are no custom commands in this server." + " Use `{}customcom add` to start adding some." + "".format(ctx.prefix)) + return + + commands = ", ".join([ctx.prefix + c for c in sorted(commands)]) + commands = "Custom commands:\n\n" + commands + + if len(commands) < 1500: + await self.bot.say(box(commands)) + else: + for page in pagify(commands, delims=[" ", "\n"]): + await self.bot.whisper(box(page)) + + async def on_message(self, message): + if len(message.content) < 2 or message.channel.is_private: + return + + server = message.server + prefix = self.get_prefix(message) + + if not prefix: + return + + if server.id in self.c_commands and self.bot.user_allowed(message): + cmdlist = self.c_commands[server.id] + cmd = message.content[len(prefix):] + if cmd in cmdlist: + cmd = cmdlist[cmd] + cmd = self.format_cc(cmd, message) + await self.bot.send_message(message.channel, cmd) + elif cmd.lower() in cmdlist: + cmd = cmdlist[cmd.lower()] + cmd = self.format_cc(cmd, message) + await self.bot.send_message(message.channel, cmd) + + def get_prefix(self, message): + for p in self.bot.settings.get_prefixes(message.server): + if message.content.startswith(p): + return p + return False + + def format_cc(self, command, message): + results = re.findall("\{([^}]+)\}", command) + for result in results: + param = self.transform_parameter(result, message) + command = command.replace("{" + result + "}", param) + return command + + def transform_parameter(self, result, message): + """ + For security reasons only specific objects are allowed + Internals are ignored + """ + raw_result = "{" + result + "}" + objects = { + "message" : message, + "author" : message.author, + "channel" : message.channel, + "server" : message.server + } + if result in objects: + return str(objects[result]) + try: + first, second = result.split(".") + except ValueError: + return raw_result + if first in objects and not second.startswith("_"): + first = objects[first] + else: + return raw_result + return str(getattr(first, second, raw_result)) + + +def check_folders(): + if not os.path.exists("data/customcom"): + print("Creating data/customcom folder...") + os.makedirs("data/customcom") + + +def check_files(): + f = "data/customcom/commands.json" + if not dataIO.is_valid_json(f): + print("Creating empty commands.json...") + dataIO.save_json(f, {}) + + +def setup(bot): + check_folders() + check_files() + bot.add_cog(CustomCommands(bot)) diff --git a/RBXLegacyDiscordBot/cogs/downloader.py 
b/RBXLegacyDiscordBot/cogs/downloader.py new file mode 100644 index 0000000..0bc404a --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/downloader.py @@ -0,0 +1,693 @@ +from discord.ext import commands +from cogs.utils.dataIO import dataIO +from cogs.utils import checks +from cogs.utils.chat_formatting import pagify, box +from __main__ import send_cmd_help, set_cog +import os +from subprocess import run as sp_run, PIPE +import shutil +from asyncio import as_completed +from setuptools import distutils +import discord +from functools import partial +from concurrent.futures import ThreadPoolExecutor +from time import time +from importlib.util import find_spec +from copy import deepcopy + +NUM_THREADS = 4 +REPO_NONEX = 0x1 +REPO_CLONE = 0x2 +REPO_SAME = 0x4 +REPOS_LIST = "https://twentysix26.github.io/Red-Docs/red_cog_approved_repos/" + +DISCLAIMER = ("You're about to add a 3rd party repository. The creator of Red" + " and its community have no responsibility for any potential " + "damage that the content of 3rd party repositories might cause." + "\nBy typing 'I agree' you declare to have read and understand " + "the above message. This message won't be shown again until the" + " next reboot.") + + +class UpdateError(Exception): + pass + + +class CloningError(UpdateError): + pass + + +class RequirementFail(UpdateError): + pass + + +class Downloader: + """Cog downloader/installer.""" + + def __init__(self, bot): + self.bot = bot + self.disclaimer_accepted = False + self.path = os.path.join("data", "downloader") + self.file_path = os.path.join(self.path, "repos.json") + # {name:{url,cog1:{installed},cog1:{installed}}} + self.repos = dataIO.load_json(self.file_path) + self.executor = ThreadPoolExecutor(NUM_THREADS) + self._do_first_run() + + def save_repos(self): + dataIO.save_json(self.file_path, self.repos) + + @commands.group(pass_context=True) + @checks.is_owner() + async def cog(self, ctx): + """Additional cogs management""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + + @cog.group(pass_context=True) + async def repo(self, ctx): + """Repo management commands""" + if ctx.invoked_subcommand is None or \ + isinstance(ctx.invoked_subcommand, commands.Group): + await send_cmd_help(ctx) + return + + @repo.command(name="add", pass_context=True) + async def _repo_add(self, ctx, repo_name: str, repo_url: str): + """Adds repo to available repo lists + + Warning: Adding 3RD Party Repositories is at your own + Risk.""" + if not self.disclaimer_accepted: + await self.bot.say(DISCLAIMER) + answer = await self.bot.wait_for_message(timeout=30, + author=ctx.message.author) + if answer is None: + await self.bot.say('Not adding repo.') + return + elif "i agree" not in answer.content.lower(): + await self.bot.say('Not adding repo.') + return + else: + self.disclaimer_accepted = True + self.repos[repo_name] = {} + self.repos[repo_name]['url'] = repo_url + try: + self.update_repo(repo_name) + except CloningError: + await self.bot.say("That repository link doesn't seem to be " + "valid.") + del self.repos[repo_name] + return + self.populate_list(repo_name) + self.save_repos() + data = self.get_info_data(repo_name) + if data: + msg = data.get("INSTALL_MSG") + if msg: + await self.bot.say(msg[:2000]) + await self.bot.say("Repo '{}' added.".format(repo_name)) + + @repo.command(name="remove") + async def _repo_del(self, repo_name: str): + """Removes repo from repo list. 
COGS ARE NOT REMOVED.""" + def remove_readonly(func, path, excinfo): + os.chmod(path, 0o755) + func(path) + + if repo_name not in self.repos: + await self.bot.say("That repo doesn't exist.") + return + del self.repos[repo_name] + try: + shutil.rmtree(os.path.join(self.path, repo_name), onerror=remove_readonly) + except FileNotFoundError: + pass + self.save_repos() + await self.bot.say("Repo '{}' removed.".format(repo_name)) + + @cog.command(name="list") + async def _send_list(self, repo_name=None): + """Lists installable cogs + + Repositories list: + https://twentysix26.github.io/Red-Docs/red_cog_approved_repos/""" + retlist = [] + if repo_name and repo_name in self.repos: + msg = "Available cogs:\n" + for cog in sorted(self.repos[repo_name].keys()): + if 'url' == cog: + continue + data = self.get_info_data(repo_name, cog) + if data and data.get("HIDDEN") is True: + continue + if data: + retlist.append([cog, data.get("SHORT", "")]) + else: + retlist.append([cog, '']) + else: + if self.repos: + msg = "Available repos:\n" + for repo_name in sorted(self.repos.keys()): + data = self.get_info_data(repo_name) + if data: + retlist.append([repo_name, data.get("SHORT", "")]) + else: + retlist.append([repo_name, ""]) + else: + await self.bot.say("You haven't added a repository yet.\n" + "Start now! {}".format(REPOS_LIST)) + return + + col_width = max(len(row[0]) for row in retlist) + 2 + for row in retlist: + msg += "\t" + "".join(word.ljust(col_width) for word in row) + "\n" + msg += "\nRepositories list: {}".format(REPOS_LIST) + for page in pagify(msg, delims=['\n'], shorten_by=8): + await self.bot.say(box(page)) + + @cog.command() + async def info(self, repo_name: str, cog: str=None): + """Shows info about the specified cog""" + if cog is not None: + cogs = self.list_cogs(repo_name) + if cog in cogs: + data = self.get_info_data(repo_name, cog) + if data: + msg = "{} by {}\n\n".format(cog, data["AUTHOR"]) + msg += data["NAME"] + "\n\n" + data["DESCRIPTION"] + await self.bot.say(box(msg)) + else: + await self.bot.say("The specified cog has no info file.") + else: + await self.bot.say("That cog doesn't exist." + " Use cog list to see the full list.") + else: + data = self.get_info_data(repo_name) + if data is None: + await self.bot.say("That repo does not exist or the" + " information file is missing for that repo" + ".") + return + name = data.get("NAME", None) + name = repo_name if name is None else name + author = data.get("AUTHOR", "Unknown") + desc = data.get("DESCRIPTION", "") + msg = ("```{} by {}```\n\n{}".format(name, author, desc)) + await self.bot.say(msg) + + @cog.command(hidden=True) + async def search(self, *terms: str): + """Search installable cogs""" + pass # TO DO + + @cog.command(pass_context=True) + async def update(self, ctx): + """Updates cogs""" + + tasknum = 0 + num_repos = len(self.repos) + + min_dt = 0.5 + burst_inc = 0.1/(NUM_THREADS) + touch_n = tasknum + touch_t = time() + + def regulate(touch_t, touch_n): + dt = time() - touch_t + if dt + burst_inc*(touch_n) > min_dt: + touch_n = 0 + touch_t = time() + return True, touch_t, touch_n + return False, touch_t, touch_n + 1 + + tasks = [] + for r in self.repos: + task = partial(self.update_repo, r) + task = self.bot.loop.run_in_executor(self.executor, task) + tasks.append(task) + + base_msg = "Downloading updated cogs, please wait... 
" + status = ' %d/%d repos updated' % (tasknum, num_repos) + msg = await self.bot.say(base_msg + status) + + updated_cogs = [] + new_cogs = [] + deleted_cogs = [] + failed_cogs = [] + error_repos = {} + installed_updated_cogs = [] + + for f in as_completed(tasks): + tasknum += 1 + try: + name, updates, oldhash = await f + if updates: + if type(updates) is dict: + for k, l in updates.items(): + tl = [(name, c, oldhash) for c in l] + if k == 'A': + new_cogs.extend(tl) + elif k == 'D': + deleted_cogs.extend(tl) + elif k == 'M': + updated_cogs.extend(tl) + except UpdateError as e: + name, what = e.args + error_repos[name] = what + edit, touch_t, touch_n = regulate(touch_t, touch_n) + if edit: + status = ' %d/%d repos updated' % (tasknum, num_repos) + msg = await self._robust_edit(msg, base_msg + status) + status = 'done. ' + + for t in updated_cogs: + repo, cog, _ = t + if self.repos[repo][cog]['INSTALLED']: + try: + await self.install(repo, cog, + no_install_on_reqs_fail=False) + except RequirementFail: + failed_cogs.append(t) + else: + installed_updated_cogs.append(t) + + for t in updated_cogs.copy(): + if t in failed_cogs: + updated_cogs.remove(t) + + if not any(self.repos[repo][cog]['INSTALLED'] for + repo, cog, _ in updated_cogs): + status += ' No updates to apply. ' + + if new_cogs: + status += '\nNew cogs: ' \ + + ', '.join('%s/%s' % c[:2] for c in new_cogs) + '.' + if deleted_cogs: + status += '\nDeleted cogs: ' \ + + ', '.join('%s/%s' % c[:2] for c in deleted_cogs) + '.' + if updated_cogs: + status += '\nUpdated cogs: ' \ + + ', '.join('%s/%s' % c[:2] for c in updated_cogs) + '.' + if failed_cogs: + status += '\nCogs that got new requirements which have ' + \ + 'failed to install: ' + \ + ', '.join('%s/%s' % c[:2] for c in failed_cogs) + '.' + if error_repos: + status += '\nThe following repos failed to update: ' + for n, what in error_repos.items(): + status += '\n%s: %s' % (n, what) + + msg = await self._robust_edit(msg, base_msg + status) + + if not installed_updated_cogs: + return + + patchnote_lang = 'Prolog' + shorten_by = 8 + len(patchnote_lang) + for note in self.patch_notes_handler(installed_updated_cogs): + if note is None: + continue + for page in pagify(note, delims=['\n'], shorten_by=shorten_by): + await self.bot.say(box(page, patchnote_lang)) + + await self.bot.say("Cogs updated. Reload updated cogs? (yes/no)") + answer = await self.bot.wait_for_message(timeout=15, + author=ctx.message.author) + if answer is None: + await self.bot.say("Ok then, you can reload cogs with" + " `{}reload `".format(ctx.prefix)) + elif answer.content.lower().strip() == "yes": + registry = dataIO.load_json(os.path.join("data", "red", "cogs.json")) + update_list = [] + fail_list = [] + for repo, cog, _ in installed_updated_cogs: + if not registry.get('cogs.' + cog, False): + continue + try: + self.bot.unload_extension("cogs." + cog) + self.bot.load_extension("cogs." + cog) + update_list.append(cog) + except: + fail_list.append(cog) + msg = 'Done.' 
+ if update_list: + msg += " The following cogs were reloaded: "\ + + ', '.join(update_list) + "\n" + if fail_list: + msg += " The following cogs failed to reload: "\ + + ', '.join(fail_list) + await self.bot.say(msg) + + else: + await self.bot.say("Ok then, you can reload cogs with" + " `{}reload `".format(ctx.prefix)) + + def patch_notes_handler(self, repo_cog_hash_pairs): + for repo, cog, oldhash in repo_cog_hash_pairs: + repo_path = os.path.join('data', 'downloader', repo) + cogfile = os.path.join(cog, cog + ".py") + cmd = ["git", "-C", repo_path, "log", "--relative-date", + "--reverse", oldhash + '..', cogfile + ] + try: + log = sp_run(cmd, stdout=PIPE).stdout.decode().strip() + yield self.format_patch(repo, cog, log) + except: + pass + + @cog.command(pass_context=True) + async def uninstall(self, ctx, repo_name, cog): + """Uninstalls a cog""" + if repo_name not in self.repos: + await self.bot.say("That repo doesn't exist.") + return + if cog not in self.repos[repo_name]: + await self.bot.say("That cog isn't available from that repo.") + return + set_cog("cogs." + cog, False) + self.repos[repo_name][cog]['INSTALLED'] = False + self.save_repos() + os.remove(os.path.join("cogs", cog + ".py")) + owner = self.bot.get_cog('Owner') + await owner.unload.callback(owner, cog_name=cog) + await self.bot.say("Cog successfully uninstalled.") + + @cog.command(name="install", pass_context=True) + async def _install(self, ctx, repo_name: str, cog: str): + """Installs specified cog""" + if repo_name not in self.repos: + await self.bot.say("That repo doesn't exist.") + return + if cog not in self.repos[repo_name]: + await self.bot.say("That cog isn't available from that repo.") + return + data = self.get_info_data(repo_name, cog) + try: + install_cog = await self.install(repo_name, cog, notify_reqs=True) + except RequirementFail: + await self.bot.say("That cog has requirements that I could not " + "install. Check the console for more " + "informations.") + return + if data is not None: + install_msg = data.get("INSTALL_MSG", None) + if install_msg: + await self.bot.say(install_msg[:2000]) + if install_cog: + await self.bot.say("Installation completed. Load it now? (yes/no)") + answer = await self.bot.wait_for_message(timeout=15, + author=ctx.message.author) + if answer is None: + await self.bot.say("Ok then, you can load it with" + " `{}load {}`".format(ctx.prefix, cog)) + elif answer.content.lower().strip() == "yes": + set_cog("cogs." + cog, True) + owner = self.bot.get_cog('Owner') + await owner.load.callback(owner, cog_name=cog) + else: + await self.bot.say("Ok then, you can load it with" + " `{}load {}`".format(ctx.prefix, cog)) + elif install_cog is False: + await self.bot.say("Invalid cog. Installation aborted.") + else: + await self.bot.say("That cog doesn't exist. Use cog list to see" + " the full list.") + + async def install(self, repo_name, cog, *, notify_reqs=False, + no_install_on_reqs_fail=True): + # 'no_install_on_reqs_fail' will make the cog get installed anyway + # on requirements installation fail. This is necessary because due to + # how 'cog update' works right now, the user would have no way to + # reupdate the cog if the update fails, since 'cog update' only + # updates the cogs that get a new commit. + # This is not a great way to deal with the problem and a cog update + # rework would probably be the best course of action. 
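+ # In short: resolve the cog's file/folder from the repo data, pip-install any
+ # missing requirements, then copy the cog's .py file (and its data folder, if
+ # present) into the local cogs/ and data/ directories.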
+ reqs_failed = False + if cog.endswith('.py'): + cog = cog[:-3] + + path = self.repos[repo_name][cog]['file'] + cog_folder_path = self.repos[repo_name][cog]['folder'] + cog_data_path = os.path.join(cog_folder_path, 'data') + data = self.get_info_data(repo_name, cog) + if data is not None: + requirements = data.get("REQUIREMENTS", []) + + requirements = [r for r in requirements + if not self.is_lib_installed(r)] + + if requirements and notify_reqs: + await self.bot.say("Installing cog's requirements...") + + for requirement in requirements: + if not self.is_lib_installed(requirement): + success = await self.bot.pip_install(requirement) + if not success: + if no_install_on_reqs_fail: + raise RequirementFail() + else: + reqs_failed = True + + to_path = os.path.join("cogs", cog + ".py") + + print("Copying {}...".format(cog)) + shutil.copy(path, to_path) + + if os.path.exists(cog_data_path): + print("Copying {}'s data folder...".format(cog)) + distutils.dir_util.copy_tree(cog_data_path, + os.path.join('data', cog)) + self.repos[repo_name][cog]['INSTALLED'] = True + self.save_repos() + if not reqs_failed: + return True + else: + raise RequirementFail() + + def get_info_data(self, repo_name, cog=None): + if cog is not None: + cogs = self.list_cogs(repo_name) + if cog in cogs: + info_file = os.path.join(cogs[cog].get('folder'), "info.json") + if os.path.isfile(info_file): + try: + data = dataIO.load_json(info_file) + except: + return None + return data + else: + repo_info = os.path.join(self.path, repo_name, 'info.json') + if os.path.isfile(repo_info): + try: + data = dataIO.load_json(repo_info) + return data + except: + return None + return None + + def list_cogs(self, repo_name): + valid_cogs = {} + + repo_path = os.path.join(self.path, repo_name) + folders = [f for f in os.listdir(repo_path) + if os.path.isdir(os.path.join(repo_path, f))] + legacy_path = os.path.join(repo_path, "cogs") + legacy_folders = [] + if os.path.exists(legacy_path): + for f in os.listdir(legacy_path): + if os.path.isdir(os.path.join(legacy_path, f)): + legacy_folders.append(os.path.join("cogs", f)) + + folders = folders + legacy_folders + + for f in folders: + cog_folder_path = os.path.join(self.path, repo_name, f) + cog_folder = os.path.basename(cog_folder_path) + for cog in os.listdir(cog_folder_path): + cog_path = os.path.join(cog_folder_path, cog) + if os.path.isfile(cog_path) and cog_folder == cog[:-3]: + valid_cogs[cog[:-3]] = {'folder': cog_folder_path, + 'file': cog_path} + return valid_cogs + + def get_dir_name(self, url): + splitted = url.split("/") + git_name = splitted[-1] + return git_name[:-4] + + def is_lib_installed(self, name): + return bool(find_spec(name)) + + def _do_first_run(self): + save = False + repos_copy = deepcopy(self.repos) + + # Issue 725 + for repo in repos_copy: + for cog in repos_copy[repo]: + cog_data = repos_copy[repo][cog] + if isinstance(cog_data, str): # ... 
url field + continue + for k, v in cog_data.items(): + if k in ("file", "folder"): + repos_copy[repo][cog][k] = os.path.normpath(cog_data[k]) + + if self.repos != repos_copy: + self.repos = repos_copy + save = True + + invalid = [] + + for repo in self.repos: + broken = 'url' in self.repos[repo] and len(self.repos[repo]) == 1 + if broken: + save = True + try: + self.update_repo(repo) + self.populate_list(repo) + except CloningError: + invalid.append(repo) + continue + except Exception as e: + print(e) # TODO: Proper logging + continue + + for repo in invalid: + del self.repos[repo] + + if save: + self.save_repos() + + def populate_list(self, name): + valid_cogs = self.list_cogs(name) + new = set(valid_cogs.keys()) + old = set(self.repos[name].keys()) + for cog in new - old: + self.repos[name][cog] = valid_cogs.get(cog, {}) + self.repos[name][cog]['INSTALLED'] = False + for cog in new & old: + self.repos[name][cog].update(valid_cogs[cog]) + for cog in old - new: + if cog != 'url': + del self.repos[name][cog] + + def update_repo(self, name): + + def run(*args, **kwargs): + env = os.environ.copy() + env['GIT_TERMINAL_PROMPT'] = '0' + kwargs['env'] = env + return sp_run(*args, **kwargs) + + try: + dd = self.path + if name not in self.repos: + raise UpdateError("Repo does not exist in data, wtf") + folder = os.path.join(dd, name) + # Make sure we don't git reset the Red folder on accident + if not os.path.exists(os.path.join(folder, '.git')): + #if os.path.exists(folder): + #shutil.rmtree(folder) + url = self.repos[name].get('url') + if not url: + raise UpdateError("Need to clone but no URL set") + branch = None + if "@" in url: # Specific branch + url, branch = url.rsplit("@", maxsplit=1) + if branch is None: + p = run(["git", "clone", url, folder]) + else: + p = run(["git", "clone", "-b", branch, url, folder]) + if p.returncode != 0: + raise CloningError() + self.populate_list(name) + return name, REPO_CLONE, None + else: + rpbcmd = ["git", "-C", folder, "rev-parse", "--abbrev-ref", "HEAD"] + p = run(rpbcmd, stdout=PIPE) + branch = p.stdout.decode().strip() + + rpcmd = ["git", "-C", folder, "rev-parse", branch] + p = run(["git", "-C", folder, "reset", "--hard", + "origin/%s" % branch, "-q"]) + if p.returncode != 0: + raise UpdateError("Error resetting to origin/%s" % branch) + p = run(rpcmd, stdout=PIPE) + if p.returncode != 0: + raise UpdateError("Unable to determine old commit hash") + oldhash = p.stdout.decode().strip() + p = run(["git", "-C", folder, "pull", "-q", "--ff-only"]) + if p.returncode != 0: + raise UpdateError("Error pulling updates") + p = run(rpcmd, stdout=PIPE) + if p.returncode != 0: + raise UpdateError("Unable to determine new commit hash") + newhash = p.stdout.decode().strip() + if oldhash == newhash: + return name, REPO_SAME, None + else: + self.populate_list(name) + self.save_repos() + ret = {} + cmd = ['git', '-C', folder, 'diff', '--no-commit-id', + '--name-status', oldhash, newhash] + p = run(cmd, stdout=PIPE) + + if p.returncode != 0: + raise UpdateError("Error in git diff") + + changed = p.stdout.strip().decode().split('\n') + + for f in changed: + if not f.endswith('.py'): + continue + + status, _, cogpath = f.partition('\t') + cogname = os.path.split(cogpath)[-1][:-3] # strip .py + if status not in ret: + ret[status] = [] + ret[status].append(cogname) + + return name, ret, oldhash + + except CloningError as e: + raise CloningError(name, *e.args) from None + except UpdateError as e: + raise UpdateError(name, *e.args) from None + + async def _robust_edit(self, msg, 
text): + try: + msg = await self.bot.edit_message(msg, text) + except discord.errors.NotFound: + msg = await self.bot.send_message(msg.channel, text) + except: + raise + return msg + + @staticmethod + def format_patch(repo, cog, log): + header = "Patch Notes for %s/%s" % (repo, cog) + line = "=" * len(header) + if log: + return '\n'.join((header, line, log)) + + +def check_folders(): + if not os.path.exists(os.path.join("data", "downloader")): + print('Making repo downloads folder...') + os.mkdir(os.path.join("data", "downloader")) + + +def check_files(): + f = os.path.join("data", "downloader", "repos.json") + if not dataIO.is_valid_json(f): + print("Creating default data/downloader/repos.json") + dataIO.save_json(f, {}) + + +def setup(bot): + check_folders() + check_files() + n = Downloader(bot) + bot.add_cog(n) diff --git a/RBXLegacyDiscordBot/cogs/economy.py b/RBXLegacyDiscordBot/cogs/economy.py new file mode 100644 index 0000000..8c28009 --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/economy.py @@ -0,0 +1,736 @@ +import discord +from discord.ext import commands +from cogs.utils.dataIO import dataIO +from collections import namedtuple, defaultdict, deque +from datetime import datetime +from copy import deepcopy +from .utils import checks +from cogs.utils.chat_formatting import pagify, box +from enum import Enum +from __main__ import send_cmd_help +import os +import time +import logging +import random + +default_settings = {"PAYDAY_TIME": 300, "PAYDAY_CREDITS": 120, + "SLOT_MIN": 5, "SLOT_MAX": 100, "SLOT_TIME": 0, + "REGISTER_CREDITS": 0} + + +class EconomyError(Exception): + pass + + +class OnCooldown(EconomyError): + pass + + +class InvalidBid(EconomyError): + pass + + +class BankError(Exception): + pass + + +class AccountAlreadyExists(BankError): + pass + + +class NoAccount(BankError): + pass + + +class InsufficientBalance(BankError): + pass + + +class NegativeValue(BankError): + pass + + +class SameSenderAndReceiver(BankError): + pass + + +NUM_ENC = "\N{COMBINING ENCLOSING KEYCAP}" + + +class SMReel(Enum): + cherries = "\N{CHERRIES}" + cookie = "\N{COOKIE}" + two = "\N{DIGIT TWO}" + NUM_ENC + flc = "\N{FOUR LEAF CLOVER}" + cyclone = "\N{CYCLONE}" + sunflower = "\N{SUNFLOWER}" + six = "\N{DIGIT SIX}" + NUM_ENC + mushroom = "\N{MUSHROOM}" + heart = "\N{HEAVY BLACK HEART}" + snowflake = "\N{SNOWFLAKE}" + +PAYOUTS = { + (SMReel.two, SMReel.two, SMReel.six) : { + "payout" : lambda x: x * 2500 + x, + "phrase" : "JACKPOT! 226! Your bid has been multiplied * 2500!" + }, + (SMReel.flc, SMReel.flc, SMReel.flc) : { + "payout" : lambda x: x + 1000, + "phrase" : "4LC! +1000!" + }, + (SMReel.cherries, SMReel.cherries, SMReel.cherries) : { + "payout" : lambda x: x + 800, + "phrase" : "Three cherries! +800!" + }, + (SMReel.two, SMReel.six) : { + "payout" : lambda x: x * 4 + x, + "phrase" : "2 6! Your bid has been multiplied * 4!" + }, + (SMReel.cherries, SMReel.cherries) : { + "payout" : lambda x: x * 3 + x, + "phrase" : "Two cherries! Your bid has been multiplied * 3!" + }, + "3 symbols" : { + "payout" : lambda x: x + 500, + "phrase" : "Three symbols! +500!" + }, + "2 symbols" : { + "payout" : lambda x: x * 2 + x, + "phrase" : "Two consecutive symbols! Your bid has been multiplied * 2!" 
+ }, +} + +SLOT_PAYOUTS_MSG = ("Slot machine payouts:\n" + "{two.value} {two.value} {six.value} Bet * 2500\n" + "{flc.value} {flc.value} {flc.value} +1000\n" + "{cherries.value} {cherries.value} {cherries.value} +800\n" + "{two.value} {six.value} Bet * 4\n" + "{cherries.value} {cherries.value} Bet * 3\n\n" + "Three symbols: +500\n" + "Two symbols: Bet * 2".format(**SMReel.__dict__)) + + +class Bank: + + def __init__(self, bot, file_path): + self.accounts = dataIO.load_json(file_path) + self.bot = bot + + def create_account(self, user, *, initial_balance=0): + server = user.server + if not self.account_exists(user): + if server.id not in self.accounts: + self.accounts[server.id] = {} + if user.id in self.accounts: # Legacy account + balance = self.accounts[user.id]["balance"] + else: + balance = initial_balance + timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") + account = {"name": user.name, + "balance": balance, + "created_at": timestamp + } + self.accounts[server.id][user.id] = account + self._save_bank() + return self.get_account(user) + else: + raise AccountAlreadyExists() + + def account_exists(self, user): + try: + self._get_account(user) + except NoAccount: + return False + return True + + def withdraw_credits(self, user, amount): + server = user.server + + if amount < 0: + raise NegativeValue() + + account = self._get_account(user) + if account["balance"] >= amount: + account["balance"] -= amount + self.accounts[server.id][user.id] = account + self._save_bank() + else: + raise InsufficientBalance() + + def deposit_credits(self, user, amount): + server = user.server + if amount < 0: + raise NegativeValue() + account = self._get_account(user) + account["balance"] += amount + self.accounts[server.id][user.id] = account + self._save_bank() + + def set_credits(self, user, amount): + server = user.server + if amount < 0: + raise NegativeValue() + account = self._get_account(user) + account["balance"] = amount + self.accounts[server.id][user.id] = account + self._save_bank() + + def transfer_credits(self, sender, receiver, amount): + if amount < 0: + raise NegativeValue() + if sender is receiver: + raise SameSenderAndReceiver() + if self.account_exists(sender) and self.account_exists(receiver): + sender_acc = self._get_account(sender) + if sender_acc["balance"] < amount: + raise InsufficientBalance() + self.withdraw_credits(sender, amount) + self.deposit_credits(receiver, amount) + else: + raise NoAccount() + + def can_spend(self, user, amount): + account = self._get_account(user) + if account["balance"] >= amount: + return True + else: + return False + + def wipe_bank(self, server): + self.accounts[server.id] = {} + self._save_bank() + + def get_server_accounts(self, server): + if server.id in self.accounts: + raw_server_accounts = deepcopy(self.accounts[server.id]) + accounts = [] + for k, v in raw_server_accounts.items(): + v["id"] = k + v["server"] = server + acc = self._create_account_obj(v) + accounts.append(acc) + return accounts + else: + return [] + + def get_all_accounts(self): + accounts = [] + for server_id, v in self.accounts.items(): + server = self.bot.get_server(server_id) + if server is None: + # Servers that have since been left will be ignored + # Same for users_id from the old bank format + continue + raw_server_accounts = deepcopy(self.accounts[server.id]) + for k, v in raw_server_accounts.items(): + v["id"] = k + v["server"] = server + acc = self._create_account_obj(v) + accounts.append(acc) + return accounts + + def get_balance(self, user): + account = 
self._get_account(user) + return account["balance"] + + def get_account(self, user): + acc = self._get_account(user) + acc["id"] = user.id + acc["server"] = user.server + return self._create_account_obj(acc) + + def _create_account_obj(self, account): + account["member"] = account["server"].get_member(account["id"]) + account["created_at"] = datetime.strptime(account["created_at"], + "%Y-%m-%d %H:%M:%S") + Account = namedtuple("Account", "id name balance " + "created_at server member") + return Account(**account) + + def _save_bank(self): + dataIO.save_json("data/economy/bank.json", self.accounts) + + def _get_account(self, user): + server = user.server + try: + return deepcopy(self.accounts[server.id][user.id]) + except KeyError: + raise NoAccount() + + +class SetParser: + def __init__(self, argument): + allowed = ("+", "-") + if argument and argument[0] in allowed: + try: + self.sum = int(argument) + except: + raise + if self.sum < 0: + self.operation = "withdraw" + elif self.sum > 0: + self.operation = "deposit" + else: + raise + self.sum = abs(self.sum) + elif argument.isdigit(): + self.sum = int(argument) + self.operation = "set" + else: + raise + + +class Economy: + """Economy + + Get rich and have fun with imaginary currency!""" + + def __init__(self, bot): + global default_settings + self.bot = bot + self.bank = Bank(bot, "data/economy/bank.json") + self.file_path = "data/economy/settings.json" + self.settings = dataIO.load_json(self.file_path) + if "PAYDAY_TIME" in self.settings: # old format + default_settings = self.settings + self.settings = {} + self.settings = defaultdict(lambda: default_settings, self.settings) + self.payday_register = defaultdict(dict) + self.slot_register = defaultdict(dict) + + @commands.group(name="bank", pass_context=True) + async def _bank(self, ctx): + """Bank operations""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + + @_bank.command(pass_context=True, no_pm=True) + async def register(self, ctx): + """Registers an account at the Twentysix bank""" + settings = self.settings[ctx.message.server.id] + author = ctx.message.author + credits = 0 + if ctx.message.server.id in self.settings: + credits = settings.get("REGISTER_CREDITS", 0) + try: + account = self.bank.create_account(author, initial_balance=credits) + await self.bot.say("{} Account opened. Current balance: {}" + "".format(author.mention, account.balance)) + except AccountAlreadyExists: + await self.bot.say("{} You already have an account at the" + " Twentysix bank.".format(author.mention)) + + @_bank.command(pass_context=True) + async def balance(self, ctx, user: discord.Member=None): + """Shows balance of user. + + Defaults to yours.""" + if not user: + user = ctx.message.author + try: + await self.bot.say("{} Your balance is: {}".format( + user.mention, self.bank.get_balance(user))) + except NoAccount: + await self.bot.say("{} You don't have an account at the" + " Twentysix bank. 
Type `{}bank register`" + " to open one.".format(user.mention, + ctx.prefix)) + else: + try: + await self.bot.say("{}'s balance is {}".format( + user.name, self.bank.get_balance(user))) + except NoAccount: + await self.bot.say("That user has no bank account.") + + @_bank.command(pass_context=True) + async def transfer(self, ctx, user: discord.Member, sum: int): + """Transfer credits to other users""" + author = ctx.message.author + try: + self.bank.transfer_credits(author, user, sum) + logger.info("{}({}) transferred {} credits to {}({})".format( + author.name, author.id, sum, user.name, user.id)) + await self.bot.say("{} credits have been transferred to {}'s" + " account.".format(sum, user.name)) + except NegativeValue: + await self.bot.say("You need to transfer at least 1 credit.") + except SameSenderAndReceiver: + await self.bot.say("You can't transfer credits to yourself.") + except InsufficientBalance: + await self.bot.say("You don't have that sum in your bank account.") + except NoAccount: + await self.bot.say("That user has no bank account.") + + @_bank.command(name="set", pass_context=True) + @checks.admin_or_permissions(manage_server=True) + async def _set(self, ctx, user: discord.Member, credits: SetParser): + """Sets credits of user's bank account. See help for more operations + + Passing positive and negative values will add/remove credits instead + + Examples: + bank set @Twentysix 26 - Sets 26 credits + bank set @Twentysix +2 - Adds 2 credits + bank set @Twentysix -6 - Removes 6 credits""" + author = ctx.message.author + try: + if credits.operation == "deposit": + self.bank.deposit_credits(user, credits.sum) + logger.info("{}({}) added {} credits to {} ({})".format( + author.name, author.id, credits.sum, user.name, user.id)) + await self.bot.say("{} credits have been added to {}" + "".format(credits.sum, user.name)) + elif credits.operation == "withdraw": + self.bank.withdraw_credits(user, credits.sum) + logger.info("{}({}) removed {} credits to {} ({})".format( + author.name, author.id, credits.sum, user.name, user.id)) + await self.bot.say("{} credits have been withdrawn from {}" + "".format(credits.sum, user.name)) + elif credits.operation == "set": + self.bank.set_credits(user, credits.sum) + logger.info("{}({}) set {} credits to {} ({})" + "".format(author.name, author.id, credits.sum, + user.name, user.id)) + await self.bot.say("{}'s credits have been set to {}".format( + user.name, credits.sum)) + except InsufficientBalance: + await self.bot.say("User doesn't have enough credits.") + except NoAccount: + await self.bot.say("User has no bank account.") + + @_bank.command(pass_context=True, no_pm=True) + @checks.serverowner_or_permissions(administrator=True) + async def reset(self, ctx, confirmation: bool=False): + """Deletes all server's bank accounts""" + if confirmation is False: + await self.bot.say("This will delete all bank accounts on " + "this server.\nIf you're sure, type " + "{}bank reset yes".format(ctx.prefix)) + else: + self.bank.wipe_bank(ctx.message.server) + await self.bot.say("All bank accounts of this server have been " + "deleted.") + + @commands.command(pass_context=True, no_pm=True) + async def payday(self, ctx): # TODO + """Get some free credits""" + author = ctx.message.author + server = author.server + id = author.id + if self.bank.account_exists(author): + if id in self.payday_register[server.id]: + seconds = abs(self.payday_register[server.id][ + id] - int(time.perf_counter())) + if seconds >= self.settings[server.id]["PAYDAY_TIME"]: + 
self.bank.deposit_credits(author, self.settings[ + server.id]["PAYDAY_CREDITS"]) + self.payday_register[server.id][ + id] = int(time.perf_counter()) + await self.bot.say( + "{} Here, take some credits. Enjoy! (+{}" + " credits!)".format( + author.mention, + str(self.settings[server.id]["PAYDAY_CREDITS"]))) + else: + dtime = self.display_time( + self.settings[server.id]["PAYDAY_TIME"] - seconds) + await self.bot.say( + "{} Too soon. For your next payday you have to" + " wait {}.".format(author.mention, dtime)) + else: + self.payday_register[server.id][id] = int(time.perf_counter()) + self.bank.deposit_credits(author, self.settings[ + server.id]["PAYDAY_CREDITS"]) + await self.bot.say( + "{} Here, take some credits. Enjoy! (+{} credits!)".format( + author.mention, + str(self.settings[server.id]["PAYDAY_CREDITS"]))) + else: + await self.bot.say("{} You need an account to receive credits." + " Type `{}bank register` to open one.".format( + author.mention, ctx.prefix)) + + @commands.group(pass_context=True) + async def leaderboard(self, ctx): + """Server / global leaderboard + + Defaults to server""" + if ctx.invoked_subcommand is None: + await ctx.invoke(self._server_leaderboard) + + @leaderboard.command(name="server", pass_context=True) + async def _server_leaderboard(self, ctx, top: int=10): + """Prints out the server's leaderboard + + Defaults to top 10""" + # Originally coded by Airenkun - edited by irdumb + server = ctx.message.server + if top < 1: + top = 10 + bank_sorted = sorted(self.bank.get_server_accounts(server), + key=lambda x: x.balance, reverse=True) + bank_sorted = [a for a in bank_sorted if a.member] # exclude users who left + if len(bank_sorted) < top: + top = len(bank_sorted) + topten = bank_sorted[:top] + highscore = "" + place = 1 + for acc in topten: + highscore += str(place).ljust(len(str(top)) + 1) + highscore += (str(acc.member.display_name) + " ").ljust(23 - len(str(acc.balance))) + highscore += str(acc.balance) + "\n" + place += 1 + if highscore != "": + for page in pagify(highscore, shorten_by=12): + await self.bot.say(box(page, lang="py")) + else: + await self.bot.say("There are no accounts in the bank.") + + @leaderboard.command(name="global") + async def _global_leaderboard(self, top: int=10): + """Prints out the global leaderboard + + Defaults to top 10""" + if top < 1: + top = 10 + bank_sorted = sorted(self.bank.get_all_accounts(), + key=lambda x: x.balance, reverse=True) + bank_sorted = [a for a in bank_sorted if a.member] # exclude users who left + unique_accounts = [] + for acc in bank_sorted: + if not self.already_in_list(unique_accounts, acc): + unique_accounts.append(acc) + if len(unique_accounts) < top: + top = len(unique_accounts) + topten = unique_accounts[:top] + highscore = "" + place = 1 + for acc in topten: + highscore += str(place).ljust(len(str(top)) + 1) + highscore += ("{} |{}| ".format(acc.member, acc.server) + ).ljust(23 - len(str(acc.balance))) + highscore += str(acc.balance) + "\n" + place += 1 + if highscore != "": + for page in pagify(highscore, shorten_by=12): + await self.bot.say(box(page, lang="py")) + else: + await self.bot.say("There are no accounts in the bank.") + + def already_in_list(self, accounts, user): + for acc in accounts: + if user.id == acc.id: + return True + return False + + @commands.command() + async def payouts(self): + """Shows slot machine payouts""" + await self.bot.whisper(SLOT_PAYOUTS_MSG) + + @commands.command(pass_context=True, no_pm=True) + async def slot(self, ctx, bid: int): + """Play the slot machine""" + 
author = ctx.message.author + server = author.server + settings = self.settings[server.id] + valid_bid = settings["SLOT_MIN"] <= bid and bid <= settings["SLOT_MAX"] + slot_time = settings["SLOT_TIME"] + last_slot = self.slot_register.get(author.id) + now = datetime.utcnow() + try: + if last_slot: + if (now - last_slot).seconds < slot_time: + raise OnCooldown() + if not valid_bid: + raise InvalidBid() + if not self.bank.can_spend(author, bid): + raise InsufficientBalance + await self.slot_machine(author, bid) + except NoAccount: + await self.bot.say("{} You need an account to use the slot " + "machine. Type `{}bank register` to open one." + "".format(author.mention, ctx.prefix)) + except InsufficientBalance: + await self.bot.say("{} You need an account with enough funds to " + "play the slot machine.".format(author.mention)) + except OnCooldown: + await self.bot.say("Slot machine is still cooling off! Wait {} " + "seconds between each pull".format(slot_time)) + except InvalidBid: + await self.bot.say("Bid must be between {} and {}." + "".format(settings["SLOT_MIN"], + settings["SLOT_MAX"])) + + async def slot_machine(self, author, bid): + default_reel = deque(SMReel) + reels = [] + self.slot_register[author.id] = datetime.utcnow() + for i in range(3): + default_reel.rotate(random.randint(-999, 999)) # weeeeee + new_reel = deque(default_reel, maxlen=3) # we need only 3 symbols + reels.append(new_reel) # for each reel + rows = ((reels[0][0], reels[1][0], reels[2][0]), + (reels[0][1], reels[1][1], reels[2][1]), + (reels[0][2], reels[1][2], reels[2][2])) + + slot = "~~\n~~" # Mobile friendly + for i, row in enumerate(rows): # Let's build the slot to show + sign = " " + if i == 1: + sign = ">" + slot += "{}{} {} {}\n".format(sign, *[c.value for c in row]) + + payout = PAYOUTS.get(rows[1]) + if not payout: + # Checks for two-consecutive-symbols special rewards + payout = PAYOUTS.get((rows[1][0], rows[1][1]), + PAYOUTS.get((rows[1][1], rows[1][2])) + ) + if not payout: + # Still nothing. Let's check for 3 generic same symbols + # or 2 consecutive symbols + has_three = rows[1][0] == rows[1][1] == rows[1][2] + has_two = (rows[1][0] == rows[1][1]) or (rows[1][1] == rows[1][2]) + if has_three: + payout = PAYOUTS["3 symbols"] + elif has_two: + payout = PAYOUTS["2 symbols"] + + if payout: + then = self.bank.get_balance(author) + pay = payout["payout"](bid) + now = then - bid + pay + self.bank.set_credits(author, now) + await self.bot.say("{}\n{} {}\n\nYour bid: {}\n{} → {}!" + "".format(slot, author.mention, + payout["phrase"], bid, then, now)) + else: + then = self.bank.get_balance(author) + self.bank.withdraw_credits(author, bid) + now = then - bid + await self.bot.say("{}\n{} Nothing!\nYour bid: {}\n{} → {}!" 
+ "".format(slot, author.mention, bid, then, now)) + + @commands.group(pass_context=True, no_pm=True) + @checks.admin_or_permissions(manage_server=True) + async def economyset(self, ctx): + """Changes economy module settings""" + server = ctx.message.server + settings = self.settings[server.id] + if ctx.invoked_subcommand is None: + msg = "```" + for k, v in settings.items(): + msg += "{}: {}\n".format(k, v) + msg += "```" + await send_cmd_help(ctx) + await self.bot.say(msg) + + @economyset.command(pass_context=True) + async def slotmin(self, ctx, bid: int): + """Minimum slot machine bid""" + server = ctx.message.server + self.settings[server.id]["SLOT_MIN"] = bid + await self.bot.say("Minimum bid is now {} credits.".format(bid)) + dataIO.save_json(self.file_path, self.settings) + + @economyset.command(pass_context=True) + async def slotmax(self, ctx, bid: int): + """Maximum slot machine bid""" + server = ctx.message.server + self.settings[server.id]["SLOT_MAX"] = bid + await self.bot.say("Maximum bid is now {} credits.".format(bid)) + dataIO.save_json(self.file_path, self.settings) + + @economyset.command(pass_context=True) + async def slottime(self, ctx, seconds: int): + """Seconds between each slots use""" + server = ctx.message.server + self.settings[server.id]["SLOT_TIME"] = seconds + await self.bot.say("Cooldown is now {} seconds.".format(seconds)) + dataIO.save_json(self.file_path, self.settings) + + @economyset.command(pass_context=True) + async def paydaytime(self, ctx, seconds: int): + """Seconds between each payday""" + server = ctx.message.server + self.settings[server.id]["PAYDAY_TIME"] = seconds + await self.bot.say("Value modified. At least {} seconds must pass " + "between each payday.".format(seconds)) + dataIO.save_json(self.file_path, self.settings) + + @economyset.command(pass_context=True) + async def paydaycredits(self, ctx, credits: int): + """Credits earned each payday""" + server = ctx.message.server + self.settings[server.id]["PAYDAY_CREDITS"] = credits + await self.bot.say("Every payday will now give {} credits." + "".format(credits)) + dataIO.save_json(self.file_path, self.settings) + + @economyset.command(pass_context=True) + async def registercredits(self, ctx, credits: int): + """Credits given on registering an account""" + server = ctx.message.server + if credits < 0: + credits = 0 + self.settings[server.id]["REGISTER_CREDITS"] = credits + await self.bot.say("Registering an account will now give {} credits." + "".format(credits)) + dataIO.save_json(self.file_path, self.settings) + + # What would I ever do without stackoverflow? 
+ def display_time(self, seconds, granularity=2): + intervals = ( # Source: http://stackoverflow.com/a/24542445 + ('weeks', 604800), # 60 * 60 * 24 * 7 + ('days', 86400), # 60 * 60 * 24 + ('hours', 3600), # 60 * 60 + ('minutes', 60), + ('seconds', 1), + ) + + result = [] + + for name, count in intervals: + value = seconds // count + if value: + seconds -= value * count + if value == 1: + name = name.rstrip('s') + result.append("{} {}".format(value, name)) + return ', '.join(result[:granularity]) + + +def check_folders(): + if not os.path.exists("data/economy"): + print("Creating data/economy folder...") + os.makedirs("data/economy") + + +def check_files(): + + f = "data/economy/settings.json" + if not dataIO.is_valid_json(f): + print("Creating default economy's settings.json...") + dataIO.save_json(f, {}) + + f = "data/economy/bank.json" + if not dataIO.is_valid_json(f): + print("Creating empty bank.json...") + dataIO.save_json(f, {}) + + +def setup(bot): + global logger + check_folders() + check_files() + logger = logging.getLogger("red.economy") + if logger.level == 0: + # Prevents the logger from being loaded again in case of module reload + logger.setLevel(logging.INFO) + handler = logging.FileHandler( + filename='data/economy/economy.log', encoding='utf-8', mode='a') + handler.setFormatter(logging.Formatter( + '%(asctime)s %(message)s', datefmt="[%d/%m/%Y %H:%M]")) + logger.addHandler(handler) + bot.add_cog(Economy(bot)) diff --git a/RBXLegacyDiscordBot/cogs/general.py b/RBXLegacyDiscordBot/cogs/general.py new file mode 100644 index 0000000..a33a006 --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/general.py @@ -0,0 +1,433 @@ +import discord +from discord.ext import commands +from .utils.chat_formatting import escape_mass_mentions, italics, pagify +from random import randint +from random import choice +from enum import Enum +from urllib.parse import quote_plus +import datetime +import time +import aiohttp +import asyncio + +settings = {"POLL_DURATION" : 60} + + +class RPS(Enum): + rock = "\N{MOYAI}" + paper = "\N{PAGE FACING UP}" + scissors = "\N{BLACK SCISSORS}" + + +class RPSParser: + def __init__(self, argument): + argument = argument.lower() + if argument == "rock": + self.choice = RPS.rock + elif argument == "paper": + self.choice = RPS.paper + elif argument == "scissors": + self.choice = RPS.scissors + else: + raise + + +class General: + """General commands.""" + + def __init__(self, bot): + self.bot = bot + self.stopwatches = {} + self.ball = ["As I see it, yes", "It is certain", "It is decidedly so", "Most likely", "Outlook good", + "Signs point to yes", "Without a doubt", "Yes", "Yes – definitely", "You may rely on it", "Reply hazy, try again", + "Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again", + "Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"] + self.poll_sessions = [] + + @commands.command(hidden=True) + async def ping(self): + """Pong.""" + await self.bot.say("Pong.") + + @commands.command() + async def choose(self, *choices): + """Chooses between multiple choices. + + To denote multiple choices, you should use double quotes. + """ + choices = [escape_mass_mentions(c) for c in choices] + if len(choices) < 2: + await self.bot.say('Not enough choices to pick from.') + else: + await self.bot.say(choice(choices)) + + @commands.command(pass_context=True) + async def roll(self, ctx, number : int = 100): + """Rolls random number (between 1 and user choice) + + Defaults to 100. 
+ """ + author = ctx.message.author + if number > 1: + n = randint(1, number) + await self.bot.say("{} :game_die: {} :game_die:".format(author.mention, n)) + else: + await self.bot.say("{} Maybe higher than 1? ;P".format(author.mention)) + + @commands.command(pass_context=True) + async def flip(self, ctx, user : discord.Member=None): + """Flips a coin... or a user. + + Defaults to coin. + """ + if user != None: + msg = "" + if user.id == self.bot.user.id: + user = ctx.message.author + msg = "Nice try. You think this is funny? How about *this* instead:\n\n" + char = "abcdefghijklmnopqrstuvwxyz" + tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz" + table = str.maketrans(char, tran) + name = user.display_name.translate(table) + char = char.upper() + tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z" + table = str.maketrans(char, tran) + name = name.translate(table) + await self.bot.say(msg + "(╯°□°)╯︵ " + name[::-1]) + else: + await self.bot.say("*flips a coin and... " + choice(["HEADS!*", "TAILS!*"])) + + @commands.command(pass_context=True) + async def rps(self, ctx, your_choice : RPSParser): + """Play rock paper scissors""" + author = ctx.message.author + player_choice = your_choice.choice + red_choice = choice((RPS.rock, RPS.paper, RPS.scissors)) + cond = { + (RPS.rock, RPS.paper) : False, + (RPS.rock, RPS.scissors) : True, + (RPS.paper, RPS.rock) : True, + (RPS.paper, RPS.scissors) : False, + (RPS.scissors, RPS.rock) : False, + (RPS.scissors, RPS.paper) : True + } + + if red_choice == player_choice: + outcome = None # Tie + else: + outcome = cond[(player_choice, red_choice)] + + if outcome is True: + await self.bot.say("{} You win {}!" + "".format(red_choice.value, author.mention)) + elif outcome is False: + await self.bot.say("{} You lose {}!" + "".format(red_choice.value, author.mention)) + else: + await self.bot.say("{} We're square {}!" + "".format(red_choice.value, author.mention)) + + @commands.command(name="8", aliases=["8ball"]) + async def _8ball(self, *, question : str): + """Ask 8 ball a question + + Question must end with a question mark. + """ + if question.endswith("?") and question != "?": + await self.bot.say("`" + choice(self.ball) + "`") + else: + await self.bot.say("That doesn't look like a question.") + + @commands.command(aliases=["sw"], pass_context=True) + async def stopwatch(self, ctx): + """Starts/stops stopwatch""" + author = ctx.message.author + if not author.id in self.stopwatches: + self.stopwatches[author.id] = int(time.perf_counter()) + await self.bot.say(author.mention + " Stopwatch started!") + else: + tmp = abs(self.stopwatches[author.id] - int(time.perf_counter())) + tmp = str(datetime.timedelta(seconds=tmp)) + await self.bot.say(author.mention + " Stopwatch stopped! 
Time: **" + tmp + "**") + self.stopwatches.pop(author.id, None) + + @commands.command() + async def lmgtfy(self, *, search_terms : str): + """Creates a lmgtfy link""" + search_terms = escape_mass_mentions(search_terms.replace(" ", "+")) + await self.bot.say("https://lmgtfy.com/?q={}".format(search_terms)) + + @commands.command(no_pm=True, hidden=True) + async def hug(self, user : discord.Member, intensity : int=1): + """Because everyone likes hugs + + Up to 10 intensity levels.""" + name = italics(user.display_name) + if intensity <= 0: + msg = "(っ˘̩╭╮˘̩)っ" + name + elif intensity <= 3: + msg = "(っ´▽`)っ" + name + elif intensity <= 6: + msg = "╰(*´︶`*)╯" + name + elif intensity <= 9: + msg = "(つ≧▽≦)つ" + name + elif intensity >= 10: + msg = "(づ ̄ ³ ̄)づ{} ⊂(´・ω・`⊂)".format(name) + await self.bot.say(msg) + + @commands.command(pass_context=True, no_pm=True) + async def userinfo(self, ctx, *, user: discord.Member=None): + """Shows users's informations""" + author = ctx.message.author + server = ctx.message.server + + if not user: + user = author + + roles = [x.name for x in user.roles if x.name != "@everyone"] + + joined_at = self.fetch_joined_at(user, server) + since_created = (ctx.message.timestamp - user.created_at).days + since_joined = (ctx.message.timestamp - joined_at).days + user_joined = joined_at.strftime("%d %b %Y %H:%M") + user_created = user.created_at.strftime("%d %b %Y %H:%M") + member_number = sorted(server.members, + key=lambda m: m.joined_at).index(user) + 1 + + created_on = "{}\n({} days ago)".format(user_created, since_created) + joined_on = "{}\n({} days ago)".format(user_joined, since_joined) + + game = "Chilling in {} status".format(user.status) + + if user.game is None: + pass + elif user.game.url is None: + game = "Playing {}".format(user.game) + else: + game = "Streaming: [{}]({})".format(user.game, user.game.url) + + if roles: + roles = sorted(roles, key=[x.name for x in server.role_hierarchy + if x.name != "@everyone"].index) + roles = ", ".join(roles) + else: + roles = "None" + + data = discord.Embed(description=game, colour=user.colour) + data.add_field(name="Joined Discord on", value=created_on) + data.add_field(name="Joined this server on", value=joined_on) + data.add_field(name="Roles", value=roles, inline=False) + data.set_footer(text="Member #{} | User ID:{}" + "".format(member_number, user.id)) + + name = str(user) + name = " ~ ".join((name, user.nick)) if user.nick else name + + if user.avatar_url: + data.set_author(name=name, url=user.avatar_url) + data.set_thumbnail(url=user.avatar_url) + else: + data.set_author(name=name) + + try: + await self.bot.say(embed=data) + except discord.HTTPException: + await self.bot.say("I need the `Embed links` permission " + "to send this") + + @commands.command(pass_context=True, no_pm=True) + async def serverinfo(self, ctx): + """Shows server's informations""" + server = ctx.message.server + online = len([m.status for m in server.members + if m.status == discord.Status.online or + m.status == discord.Status.idle]) + total_users = len(server.members) + text_channels = len([x for x in server.channels + if x.type == discord.ChannelType.text]) + voice_channels = len(server.channels) - text_channels + passed = (ctx.message.timestamp - server.created_at).days + created_at = ("Since {}. That's over {} days ago!" 
+ "".format(server.created_at.strftime("%d %b %Y %H:%M"), + passed)) + + colour = ''.join([choice('0123456789ABCDEF') for x in range(6)]) + colour = int(colour, 16) + + data = discord.Embed( + description=created_at, + colour=discord.Colour(value=colour)) + data.add_field(name="Region", value=str(server.region)) + data.add_field(name="Users", value="{}/{}".format(online, total_users)) + data.add_field(name="Text Channels", value=text_channels) + data.add_field(name="Voice Channels", value=voice_channels) + data.add_field(name="Roles", value=len(server.roles)) + data.add_field(name="Owner", value=str(server.owner)) + data.set_footer(text="Server ID: " + server.id) + + if server.icon_url: + data.set_author(name=server.name, url=server.icon_url) + data.set_thumbnail(url=server.icon_url) + else: + data.set_author(name=server.name) + + try: + await self.bot.say(embed=data) + except discord.HTTPException: + await self.bot.say("I need the `Embed links` permission " + "to send this") + + @commands.command() + async def urban(self, *, search_terms : str, definition_number : int=1): + """Urban Dictionary search + + Definition number must be between 1 and 10""" + def encode(s): + return quote_plus(s, encoding='utf-8', errors='replace') + + # definition_number is just there to show up in the help + # all this mess is to avoid forcing double quotes on the user + + search_terms = search_terms.split(" ") + try: + if len(search_terms) > 1: + pos = int(search_terms[-1]) - 1 + search_terms = search_terms[:-1] + else: + pos = 0 + if pos not in range(0, 11): # API only provides the + pos = 0 # top 10 definitions + except ValueError: + pos = 0 + + search_terms = "+".join([encode(s) for s in search_terms]) + url = "http://api.urbandictionary.com/v0/define?term=" + search_terms + try: + async with aiohttp.get(url) as r: + result = await r.json() + if result["list"]: + definition = result['list'][pos]['definition'] + example = result['list'][pos]['example'] + defs = len(result['list']) + msg = ("**Definition #{} out of {}:\n**{}\n\n" + "**Example:\n**{}".format(pos+1, defs, definition, + example)) + msg = pagify(msg, ["\n"]) + for page in msg: + await self.bot.say(page) + else: + await self.bot.say("Your search terms gave no results.") + except IndexError: + await self.bot.say("There is no definition #{}".format(pos+1)) + except: + await self.bot.say("Error.") + + @commands.command(pass_context=True, no_pm=True) + async def poll(self, ctx, *text): + """Starts/stops a poll + + Usage example: + poll Is this a poll?;Yes;No;Maybe + poll stop""" + message = ctx.message + if len(text) == 1: + if text[0].lower() == "stop": + await self.endpoll(message) + return + if not self.getPollByChannel(message): + check = " ".join(text).lower() + if "@everyone" in check or "@here" in check: + await self.bot.say("Nice try.") + return + p = NewPoll(message, " ".join(text), self) + if p.valid: + self.poll_sessions.append(p) + await p.start() + else: + await self.bot.say("poll question;option1;option2 (...)") + else: + await self.bot.say("A poll is already ongoing in this channel.") + + async def endpoll(self, message): + if self.getPollByChannel(message): + p = self.getPollByChannel(message) + if p.author == message.author.id: # or isMemberAdmin(message) + await self.getPollByChannel(message).endPoll() + else: + await self.bot.say("Only admins and the author can stop the poll.") + else: + await self.bot.say("There's no poll ongoing in this channel.") + + def getPollByChannel(self, message): + for poll in self.poll_sessions: + if 
poll.channel == message.channel: + return poll + return False + + async def check_poll_votes(self, message): + if message.author.id != self.bot.user.id: + if self.getPollByChannel(message): + self.getPollByChannel(message).checkAnswer(message) + + def fetch_joined_at(self, user, server): + """Just a special case for someone special :^)""" + if user.id == "96130341705637888" and server.id == "133049272517001216": + return datetime.datetime(2016, 1, 10, 6, 8, 4, 443000) + else: + return user.joined_at + +class NewPoll(): + def __init__(self, message, text, main): + self.channel = message.channel + self.author = message.author.id + self.client = main.bot + self.poll_sessions = main.poll_sessions + msg = [ans.strip() for ans in text.split(";")] + if len(msg) < 2: # Needs at least one question and 2 choices + self.valid = False + return None + else: + self.valid = True + self.already_voted = [] + self.question = msg[0] + msg.remove(self.question) + self.answers = {} + i = 1 + for answer in msg: # {id : {answer, votes}} + self.answers[i] = {"ANSWER" : answer, "VOTES" : 0} + i += 1 + + async def start(self): + msg = "**POLL STARTED!**\n\n{}\n\n".format(self.question) + for id, data in self.answers.items(): + msg += "{}. *{}*\n".format(id, data["ANSWER"]) + msg += "\nType the number to vote!" + await self.client.send_message(self.channel, msg) + await asyncio.sleep(settings["POLL_DURATION"]) + if self.valid: + await self.endPoll() + + async def endPoll(self): + self.valid = False + msg = "**POLL ENDED!**\n\n{}\n\n".format(self.question) + for data in self.answers.values(): + msg += "*{}* - {} votes\n".format(data["ANSWER"], str(data["VOTES"])) + await self.client.send_message(self.channel, msg) + self.poll_sessions.remove(self) + + def checkAnswer(self, message): + try: + i = int(message.content) + if i in self.answers.keys(): + if message.author.id not in self.already_voted: + data = self.answers[i] + data["VOTES"] += 1 + self.answers[i] = data + self.already_voted.append(message.author.id) + except ValueError: + pass + +def setup(bot): + n = General(bot) + bot.add_listener(n.check_poll_votes, "on_message") + bot.add_cog(n) diff --git a/RBXLegacyDiscordBot/cogs/image.py b/RBXLegacyDiscordBot/cogs/image.py new file mode 100644 index 0000000..a4c2225 --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/image.py @@ -0,0 +1,168 @@ +from discord.ext import commands +from random import choice, shuffle +import aiohttp +import functools +import asyncio + +try: + from imgurpython import ImgurClient +except: + ImgurClient = False + +CLIENT_ID = "1fd3ef04daf8cab" +CLIENT_SECRET = "f963e574e8e3c17993c933af4f0522e1dc01e230" +GIPHY_API_KEY = "dc6zaTOxFJmzC" + + +class Image: + """Image related commands.""" + + def __init__(self, bot): + self.bot = bot + self.imgur = ImgurClient(CLIENT_ID, CLIENT_SECRET) + + @commands.group(name="imgur", no_pm=True, pass_context=True) + async def _imgur(self, ctx): + """Retrieves pictures from imgur""" + if ctx.invoked_subcommand is None: + await self.bot.send_cmd_help(ctx) + + @_imgur.command(pass_context=True, name="random") + async def imgur_random(self, ctx, *, term: str=None): + """Retrieves a random image from Imgur + + Search terms can be specified""" + if term is None: + task = functools.partial(self.imgur.gallery_random, page=0) + else: + task = functools.partial(self.imgur.gallery_search, term, + advanced=None, sort='time', + window='all', page=0) + task = self.bot.loop.run_in_executor(None, task) + + try: + results = await asyncio.wait_for(task, timeout=10) + except 
asyncio.TimeoutError: + await self.bot.say("Error: request timed out") + else: + if results: + item = choice(results) + link = item.gifv if hasattr(item, "gifv") else item.link + await self.bot.say(link) + else: + await self.bot.say("Your search terms gave no results.") + + @_imgur.command(pass_context=True, name="search") + async def imgur_search(self, ctx, *, term: str): + """Searches Imgur for the specified term and returns up to 3 results""" + task = functools.partial(self.imgur.gallery_search, term, + advanced=None, sort='time', + window='all', page=0) + task = self.bot.loop.run_in_executor(None, task) + + try: + results = await asyncio.wait_for(task, timeout=10) + except asyncio.TimeoutError: + await self.bot.say("Error: request timed out") + else: + if results: + shuffle(results) + msg = "Search results...\n" + for r in results[:3]: + msg += r.gifv if hasattr(r, "gifv") else r.link + msg += "\n" + await self.bot.say(msg) + else: + await self.bot.say("Your search terms gave no results.") + + @_imgur.command(pass_context=True, name="subreddit") + async def imgur_subreddit(self, ctx, subreddit: str, sort_type: str="top", window: str="day"): + """Gets images from the specified subreddit section + + Sort types: new, top + Time windows: day, week, month, year, all""" + sort_type = sort_type.lower() + + if sort_type not in ("new", "top"): + await self.bot.say("Only 'new' and 'top' are a valid sort type.") + return + elif window not in ("day", "week", "month", "year", "all"): + await self.bot.send_cmd_help(ctx) + return + + if sort_type == "new": + sort = "time" + elif sort_type == "top": + sort = "top" + + links = [] + + task = functools.partial(self.imgur.subreddit_gallery, subreddit, + sort=sort, window=window, page=0) + task = self.bot.loop.run_in_executor(None, task) + try: + items = await asyncio.wait_for(task, timeout=10) + except asyncio.TimeoutError: + await self.bot.say("Error: request timed out") + return + + for item in items[:3]: + link = item.gifv if hasattr(item, "gifv") else item.link + links.append("{}\n{}".format(item.title, link)) + + if links: + await self.bot.say("\n".join(links)) + else: + await self.bot.say("No results found.") + + @commands.command(pass_context=True, no_pm=True) + async def gif(self, ctx, *keywords): + """Retrieves first search result from giphy""" + if keywords: + keywords = "+".join(keywords) + else: + await self.bot.send_cmd_help(ctx) + return + + url = ("http://api.giphy.com/v1/gifs/search?&api_key={}&q={}" + "".format(GIPHY_API_KEY, keywords)) + + async with aiohttp.get(url) as r: + result = await r.json() + if r.status == 200: + if result["data"]: + await self.bot.say(result["data"][0]["url"]) + else: + await self.bot.say("No results found.") + else: + await self.bot.say("Error contacting the API") + + @commands.command(pass_context=True, no_pm=True) + async def gifr(self, ctx, *keywords): + """Retrieves a random gif from a giphy search""" + if keywords: + keywords = "+".join(keywords) + else: + await self.bot.send_cmd_help(ctx) + return + + url = ("http://api.giphy.com/v1/gifs/random?&api_key={}&tag={}" + "".format(GIPHY_API_KEY, keywords)) + + async with aiohttp.get(url) as r: + result = await r.json() + if r.status == 200: + if result["data"]: + await self.bot.say(result["data"]["url"]) + else: + await self.bot.say("No results found.") + else: + await self.bot.say("Error contacting the API") + + +def setup(bot): + if ImgurClient is False: + raise RuntimeError("You need the imgurpython module to use this.\n" + "pip3 install imgurpython") + + 
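+ # Reached only when imgurpython imported successfully above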
bot.add_cog(Image(bot)) diff --git a/RBXLegacyDiscordBot/cogs/mod.py b/RBXLegacyDiscordBot/cogs/mod.py new file mode 100644 index 0000000..7f645da --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/mod.py @@ -0,0 +1,1720 @@ +import discord +from discord.ext import commands +from .utils.dataIO import dataIO +from .utils import checks +from __main__ import send_cmd_help, settings +from datetime import datetime +from collections import deque, defaultdict +from cogs.utils.chat_formatting import escape_mass_mentions, box, pagify +import os +import re +import logging +import asyncio + + +ACTIONS_REPR = { + "BAN" : ("Ban", "\N{HAMMER}"), + "KICK" : ("Kick", "\N{WOMANS BOOTS}"), + "CMUTE" : ("Channel mute", "\N{SPEAKER WITH CANCELLATION STROKE}"), + "SMUTE" : ("Server mute", "\N{SPEAKER WITH CANCELLATION STROKE}"), + "SOFTBAN" : ("Softban", "\N{DASH SYMBOL} \N{HAMMER}"), + "HACKBAN" : ("Preemptive ban", "\N{BUST IN SILHOUETTE} \N{HAMMER}"), + "UNBAN" : ("Unban", "\N{DOVE OF PEACE}") +} + +ACTIONS_CASES = { + "BAN" : True, + "KICK" : True, + "CMUTE" : False, + "SMUTE" : True, + "SOFTBAN" : True, + "HACKBAN" : True, + "UNBAN" : True +} + +default_settings = { + "ban_mention_spam" : False, + "delete_repeats" : False, + "mod-log" : None, + "respect_hierarchy" : False +} + + +for act, enabled in ACTIONS_CASES.items(): + act = act.lower() + '_cases' + default_settings[act] = enabled + + +class ModError(Exception): + pass + + +class UnauthorizedCaseEdit(ModError): + pass + + +class CaseMessageNotFound(ModError): + pass + + +class NoModLogChannel(ModError): + pass + + +class NoModLogAccess(ModError): + pass + + +class TempCache: + """ + This is how we avoid events such as ban and unban + from triggering twice in the mod-log. + Kinda hacky but functioning + """ + def __init__(self, bot): + self.bot = bot + self._cache = [] + + def add(self, user, server, action, seconds=1): + tmp = (user.id, server.id, action) + self._cache.append(tmp) + + async def delete_value(): + await asyncio.sleep(seconds) + self._cache.remove(tmp) + + self.bot.loop.create_task(delete_value()) + + def check(self, user, server, action): + return (user.id, server.id, action) in self._cache + + +class Mod: + """Moderation tools.""" + + def __init__(self, bot): + self.bot = bot + self.ignore_list = dataIO.load_json("data/mod/ignorelist.json") + self.filter = dataIO.load_json("data/mod/filter.json") + self.past_names = dataIO.load_json("data/mod/past_names.json") + self.past_nicknames = dataIO.load_json("data/mod/past_nicknames.json") + settings = dataIO.load_json("data/mod/settings.json") + self.settings = defaultdict(lambda: default_settings.copy(), settings) + self.cache = defaultdict(lambda: deque(maxlen=3)) + self.cases = dataIO.load_json("data/mod/modlog.json") + self.last_case = defaultdict(dict) + self.temp_cache = TempCache(bot) + perms_cache = dataIO.load_json("data/mod/perms_cache.json") + self._perms_cache = defaultdict(dict, perms_cache) + + @commands.group(pass_context=True, no_pm=True) + @checks.serverowner_or_permissions(administrator=True) + async def modset(self, ctx): + """Manages server administration settings.""" + if ctx.invoked_subcommand is None: + server = ctx.message.server + await send_cmd_help(ctx) + roles = settings.get_server(server).copy() + _settings = {**self.settings[server.id], **roles} + if "respect_hierarchy" not in _settings: + _settings["respect_hierarchy"] = default_settings["respect_hierarchy"] + if "delete_delay" not in _settings: + _settings["delete_delay"] = "Disabled" + + msg = ("Admin role: 
{ADMIN_ROLE}\n" + "Mod role: {MOD_ROLE}\n" + "Mod-log: {mod-log}\n" + "Delete repeats: {delete_repeats}\n" + "Ban mention spam: {ban_mention_spam}\n" + "Delete delay: {delete_delay}\n" + "Respects hierarchy: {respect_hierarchy}" + "".format(**_settings)) + await self.bot.say(box(msg)) + + @modset.command(name="adminrole", pass_context=True, no_pm=True, hidden=True) + async def _modset_adminrole(self, ctx): + """Use [p]set adminrole instead""" + await self.bot.say("This command has been renamed " + "`{}set adminrole`".format(ctx.prefix)) + + @modset.command(name="modrole", pass_context=True, no_pm=True, hidden=True) + async def _modset_modrole(self, ctx): + """Use [p]set modrole instead""" + await self.bot.say("This command has been renamed " + "`{}set modrole`".format(ctx.prefix)) + + @modset.command(pass_context=True, no_pm=True) + async def modlog(self, ctx, channel : discord.Channel=None): + """Sets a channel as mod log + + Leaving the channel parameter empty will deactivate it""" + server = ctx.message.server + if channel: + self.settings[server.id]["mod-log"] = channel.id + await self.bot.say("Mod events will be sent to {}" + "".format(channel.mention)) + else: + if self.settings[server.id]["mod-log"] is None: + await send_cmd_help(ctx) + return + self.settings[server.id]["mod-log"] = None + await self.bot.say("Mod log deactivated.") + dataIO.save_json("data/mod/settings.json", self.settings) + + @modset.command(pass_context=True, no_pm=True) + async def banmentionspam(self, ctx, max_mentions : int=False): + """Enables auto ban for messages mentioning X different people + + Accepted values: 5 or superior""" + server = ctx.message.server + if max_mentions: + if max_mentions < 5: + max_mentions = 5 + self.settings[server.id]["ban_mention_spam"] = max_mentions + await self.bot.say("Autoban for mention spam enabled. " + "Anyone mentioning {} or more different people " + "in a single message will be autobanned." + "".format(max_mentions)) + else: + if self.settings[server.id]["ban_mention_spam"] is False: + await send_cmd_help(ctx) + return + self.settings[server.id]["ban_mention_spam"] = False + await self.bot.say("Autoban for mention spam disabled.") + dataIO.save_json("data/mod/settings.json", self.settings) + + @modset.command(pass_context=True, no_pm=True) + async def deleterepeats(self, ctx): + """Enables auto deletion of repeated messages""" + server = ctx.message.server + if not self.settings[server.id]["delete_repeats"]: + self.settings[server.id]["delete_repeats"] = True + await self.bot.say("Messages repeated up to 3 times will " + "be deleted.") + else: + self.settings[server.id]["delete_repeats"] = False + await self.bot.say("Repeated messages will be ignored.") + dataIO.save_json("data/mod/settings.json", self.settings) + + @modset.command(pass_context=True, no_pm=True) + async def resetcases(self, ctx): + """Resets modlog's cases""" + server = ctx.message.server + self.cases[server.id] = {} + dataIO.save_json("data/mod/modlog.json", self.cases) + await self.bot.say("Cases have been reset.") + + @modset.command(pass_context=True, no_pm=True) + async def deletedelay(self, ctx, time: int=None): + """Sets the delay until the bot removes the command message. + Must be between -1 and 60. 
+ + A delay of -1 means the bot will not remove the message.""" + server = ctx.message.server + if time is not None: + time = min(max(time, -1), 60) # Enforces the time limits + self.settings[server.id]["delete_delay"] = time + if time == -1: + await self.bot.say("Command deleting disabled.") + else: + await self.bot.say("Delete delay set to {}" + " seconds.".format(time)) + dataIO.save_json("data/mod/settings.json", self.settings) + else: + try: + delay = self.settings[server.id]["delete_delay"] + except KeyError: + await self.bot.say("Delete delay not yet set up on this" + " server.") + else: + if delay != -1: + await self.bot.say("Bot will delete command messages after" + " {} seconds. Set this value to -1 to" + " stop deleting messages".format(delay)) + else: + await self.bot.say("I will not delete command messages.") + + @modset.command(pass_context=True, no_pm=True, name='cases') + async def set_cases(self, ctx, action: str = None, enabled: bool = None): + """Enables or disables case creation for each type of mod action + + Enabled can be 'on' or 'off'""" + server = ctx.message.server + + if action == enabled: # No args given + await self.bot.send_cmd_help(ctx) + msg = "Current settings:\n```py\n" + maxlen = max(map(lambda x: len(x[0]), ACTIONS_REPR.values())) + for action, name in ACTIONS_REPR.items(): + action = action.lower() + '_cases' + value = self.settings[server.id].get(action, + default_settings[action]) + value = 'enabled' if value else 'disabled' + msg += '%s : %s\n' % (name[0].ljust(maxlen), value) + + msg += '```' + await self.bot.say(msg) + + elif action.upper() not in ACTIONS_CASES: + msg = "That's not a valid action. Valid actions are: \n" + msg += ', '.join(sorted(map(str.lower, ACTIONS_CASES))) + await self.bot.say(msg) + + elif enabled == None: + action = action.lower() + '_cases' + value = self.settings[server.id].get(action, + default_settings[action]) + await self.bot.say('Case creation for %s is currently %s' % + (action, 'enabled' if value else 'disabled')) + else: + name = ACTIONS_REPR[action.upper()][0] + action = action.lower() + '_cases' + value = self.settings[server.id].get(action, + default_settings[action]) + if value != enabled: + self.settings[server.id][action] = enabled + dataIO.save_json("data/mod/settings.json", self.settings) + msg = ('Case creation for %s actions %s %s.' % + (name.lower(), + 'was already' if enabled == value else 'is now', + 'enabled' if enabled else 'disabled') + ) + await self.bot.say(msg) + + @modset.command(pass_context=True, no_pm=True) + @checks.serverowner_or_permissions() + async def hierarchy(self, ctx): + """Toggles role hierarchy check for mods / admins""" + server = ctx.message.server + toggled = self.settings[server.id].get("respect_hierarchy", + default_settings["respect_hierarchy"]) + if not toggled: + self.settings[server.id]["respect_hierarchy"] = True + await self.bot.say("Role hierarchy will be checked when " + "moderation commands are issued.") + else: + self.settings[server.id]["respect_hierarchy"] = False + await self.bot.say("Role hierarchy will be ignored when " + "moderation commands are issued.") + dataIO.save_json("data/mod/settings.json", self.settings) + + @commands.command(no_pm=True, pass_context=True) + @checks.admin_or_permissions(kick_members=True) + async def kick(self, ctx, user: discord.Member, *, reason: str = None): + """Kicks user.""" + author = ctx.message.author + server = author.server + + if author == user: + await self.bot.say("I cannot let you do that. 
Self-harm is " + "bad \N{PENSIVE FACE}") + return + elif not self.is_allowed_by_hierarchy(server, author, user): + await self.bot.say("I cannot let you do that. You are " + "not higher than the user in the role " + "hierarchy.") + return + + try: + await self.bot.kick(user) + logger.info("{}({}) kicked {}({})".format( + author.name, author.id, user.name, user.id)) + await self.new_case(server, + action="KICK", + mod=author, + user=user, + reason=reason) + await self.bot.say("Done. That felt good.") + except discord.errors.Forbidden: + await self.bot.say("I'm not allowed to do that.") + except Exception as e: + print(e) + + @commands.command(no_pm=True, pass_context=True) + @checks.admin_or_permissions(ban_members=True) + async def ban(self, ctx, user: discord.Member, days: str = None, *, reason: str = None): + """Bans user and deletes last X days worth of messages. + + If days is not a number, it's treated as the first word of the reason. + Minimum 0 days, maximum 7. Defaults to 0.""" + author = ctx.message.author + server = author.server + + if author == user: + await self.bot.say("I cannot let you do that. Self-harm is " + "bad \N{PENSIVE FACE}") + return + elif not self.is_allowed_by_hierarchy(server, author, user): + await self.bot.say("I cannot let you do that. You are " + "not higher than the user in the role " + "hierarchy.") + return + + if days: + if days.isdigit(): + days = int(days) + else: + if reason: + reason = days + ' ' + reason + else: + reason = days + days = 0 + else: + days = 0 + + if days < 0 or days > 7: + await self.bot.say("Invalid days. Must be between 0 and 7.") + return + + try: + self.temp_cache.add(user, server, "BAN") + await self.bot.ban(user, days) + logger.info("{}({}) banned {}({}), deleting {} days worth of messages".format( + author.name, author.id, user.name, user.id, str(days))) + await self.new_case(server, + action="BAN", + mod=author, + user=user, + reason=reason) + await self.bot.say("Done. It was about time.") + except discord.errors.Forbidden: + await self.bot.say("I'm not allowed to do that.") + except Exception as e: + print(e) + + @commands.command(no_pm=True, pass_context=True) + @checks.admin_or_permissions(ban_members=True) + async def hackban(self, ctx, user_id: int, *, reason: str = None): + """Preemptively bans user from the server + + A user ID needs to be provided + If the user is present in the server a normal ban will be + issued instead""" + user_id = str(user_id) + author = ctx.message.author + server = author.server + + ban_list = await self.bot.get_bans(server) + is_banned = discord.utils.get(ban_list, id=user_id) + + if is_banned: + await self.bot.say("User is already banned.") + return + + user = server.get_member(user_id) + if user is not None: + await ctx.invoke(self.ban, user=user, reason=reason) + return + + try: + await self.bot.http.ban(user_id, server.id, 0) + except discord.NotFound: + await self.bot.say("User not found. Have you provided the " + "correct user ID?") + except discord.Forbidden: + await self.bot.say("I lack the permissions to do this.") + else: + logger.info("{}({}) hackbanned {}" + "".format(author.name, author.id, user_id)) + user = await self.bot.get_user_info(user_id) + await self.new_case(server, + action="HACKBAN", + mod=author, + user=user, + reason=reason) + await self.bot.say("Done. 
The user will not be able to join this " + "server.") + + @commands.command(no_pm=True, pass_context=True) + @checks.admin_or_permissions(ban_members=True) + async def softban(self, ctx, user: discord.Member, *, reason: str = None): + """Kicks the user, deleting 1 day worth of messages.""" + server = ctx.message.server + channel = ctx.message.channel + can_ban = channel.permissions_for(server.me).ban_members + author = ctx.message.author + + if author == user: + await self.bot.say("I cannot let you do that. Self-harm is " + "bad \N{PENSIVE FACE}") + return + elif not self.is_allowed_by_hierarchy(server, author, user): + await self.bot.say("I cannot let you do that. You are " + "not higher than the user in the role " + "hierarchy.") + return + + try: + invite = await self.bot.create_invite(server, max_age=3600*24) + invite = "\nInvite: " + invite + except: + invite = "" + if can_ban: + try: + try: # We don't want blocked DMs preventing us from banning + msg = await self.bot.send_message(user, "You have been banned and " + "then unbanned as a quick way to delete your messages.\n" + "You can now join the server again.{}".format(invite)) + except: + pass + self.temp_cache.add(user, server, "BAN") + await self.bot.ban(user, 1) + logger.info("{}({}) softbanned {}({}), deleting 1 day worth " + "of messages".format(author.name, author.id, user.name, + user.id)) + await self.new_case(server, + action="SOFTBAN", + mod=author, + user=user, + reason=reason) + self.temp_cache.add(user, server, "UNBAN") + await self.bot.unban(server, user) + await self.bot.say("Done. Enough chaos.") + except discord.errors.Forbidden: + await self.bot.say("My role is not high enough to softban that user.") + await self.bot.delete_message(msg) + except Exception as e: + print(e) + else: + await self.bot.say("I'm not allowed to do that.") + + @commands.command(no_pm=True, pass_context=True) + @checks.admin_or_permissions(manage_nicknames=True) + async def rename(self, ctx, user : discord.Member, *, nickname=""): + """Changes user's nickname + + Leaving the nickname empty will remove it.""" + nickname = nickname.strip() + if nickname == "": + nickname = None + try: + await self.bot.change_nickname(user, nickname) + await self.bot.say("Done.") + except discord.Forbidden: + await self.bot.say("I cannot do that, I lack the " + "\"Manage Nicknames\" permission.") + + @commands.group(pass_context=True, no_pm=True, invoke_without_command=True) + @checks.mod_or_permissions(administrator=True) + async def mute(self, ctx, user : discord.Member, *, reason: str = None): + """Mutes user in the channel/server + + Defaults to channel""" + if ctx.invoked_subcommand is None: + await ctx.invoke(self.channel_mute, user=user, reason=reason) + + @checks.mod_or_permissions(administrator=True) + @mute.command(name="channel", pass_context=True, no_pm=True) + async def channel_mute(self, ctx, user : discord.Member, *, reason: str = None): + """Mutes user in the current channel""" + author = ctx.message.author + channel = ctx.message.channel + server = ctx.message.server + overwrites = channel.overwrites_for(user) + + if overwrites.send_messages is False: + await self.bot.say("That user can't send messages in this " + "channel.") + return + elif not self.is_allowed_by_hierarchy(server, author, user): + await self.bot.say("I cannot let you do that. 
You are " + "not higher than the user in the role " + "hierarchy.") + return + + self._perms_cache[user.id][channel.id] = overwrites.send_messages + overwrites.send_messages = False + try: + await self.bot.edit_channel_permissions(channel, user, overwrites) + except discord.Forbidden: + await self.bot.say("Failed to mute user. I need the manage roles " + "permission and the user I'm muting must be " + "lower than myself in the role hierarchy.") + else: + dataIO.save_json("data/mod/perms_cache.json", self._perms_cache) + await self.new_case(server, + action="CMUTE", + channel=channel, + mod=author, + user=user, + reason=reason) + await self.bot.say("User has been muted in this channel.") + + @checks.mod_or_permissions(administrator=True) + @mute.command(name="server", pass_context=True, no_pm=True) + async def server_mute(self, ctx, user : discord.Member, *, reason: str = None): + """Mutes user in the server""" + author = ctx.message.author + server = ctx.message.server + + if not self.is_allowed_by_hierarchy(server, author, user): + await self.bot.say("I cannot let you do that. You are " + "not higher than the user in the role " + "hierarchy.") + return + + register = {} + for channel in server.channels: + if channel.type != discord.ChannelType.text: + continue + overwrites = channel.overwrites_for(user) + if overwrites.send_messages is False: + continue + register[channel.id] = overwrites.send_messages + overwrites.send_messages = False + try: + await self.bot.edit_channel_permissions(channel, user, + overwrites) + except discord.Forbidden: + await self.bot.say("Failed to mute user. I need the manage roles " + "permission and the user I'm muting must be " + "lower than myself in the role hierarchy.") + return + else: + await asyncio.sleep(0.1) + if not register: + await self.bot.say("That user is already muted in all channels.") + return + self._perms_cache[user.id] = register + dataIO.save_json("data/mod/perms_cache.json", self._perms_cache) + await self.new_case(server, + action="SMUTE", + mod=author, + user=user, + reason=reason) + await self.bot.say("User has been muted in this server.") + + @commands.group(pass_context=True, no_pm=True, invoke_without_command=True) + @checks.mod_or_permissions(administrator=True) + async def unmute(self, ctx, user : discord.Member): + """Unmutes user in the channel/server + + Defaults to channel""" + if ctx.invoked_subcommand is None: + await ctx.invoke(self.channel_unmute, user=user) + + @checks.mod_or_permissions(administrator=True) + @unmute.command(name="channel", pass_context=True, no_pm=True) + async def channel_unmute(self, ctx, user : discord.Member): + """Unmutes user in the current channel""" + channel = ctx.message.channel + author = ctx.message.author + server = ctx.message.server + overwrites = channel.overwrites_for(user) + + if overwrites.send_messages: + await self.bot.say("That user doesn't seem to be muted " + "in this channel.") + return + elif not self.is_allowed_by_hierarchy(server, author, user): + await self.bot.say("I cannot let you do that. 
You are " + "not higher than the user in the role " + "hierarchy.") + return + + if user.id in self._perms_cache: + old_value = self._perms_cache[user.id].get(channel.id) + else: + old_value = None + overwrites.send_messages = old_value + is_empty = self.are_overwrites_empty(overwrites) + try: + if not is_empty: + await self.bot.edit_channel_permissions(channel, user, + overwrites) + else: + await self.bot.delete_channel_permissions(channel, user) + except discord.Forbidden: + await self.bot.say("Failed to unmute user. I need the manage roles" + " permission and the user I'm unmuting must be " + "lower than myself in the role hierarchy.") + else: + try: + del self._perms_cache[user.id][channel.id] + except KeyError: + pass + if user.id in self._perms_cache and not self._perms_cache[user.id]: + del self._perms_cache[user.id] # cleanup + dataIO.save_json("data/mod/perms_cache.json", self._perms_cache) + await self.bot.say("User has been unmuted in this channel.") + + @checks.mod_or_permissions(administrator=True) + @unmute.command(name="server", pass_context=True, no_pm=True) + async def server_unmute(self, ctx, user : discord.Member): + """Unmutes user in the server""" + server = ctx.message.server + author = ctx.message.author + + if user.id not in self._perms_cache: + await self.bot.say("That user doesn't seem to have been muted with {0}mute commands. " + "Unmute them in the channels you want with `{0}unmute `" + "".format(ctx.prefix)) + return + elif not self.is_allowed_by_hierarchy(server, author, user): + await self.bot.say("I cannot let you do that. You are " + "not higher than the user in the role " + "hierarchy.") + return + + for channel in server.channels: + if channel.type != discord.ChannelType.text: + continue + if channel.id not in self._perms_cache[user.id]: + continue + value = self._perms_cache[user.id].get(channel.id) + overwrites = channel.overwrites_for(user) + if overwrites.send_messages is False: + overwrites.send_messages = value + is_empty = self.are_overwrites_empty(overwrites) + try: + if not is_empty: + await self.bot.edit_channel_permissions(channel, user, + overwrites) + else: + await self.bot.delete_channel_permissions(channel, user) + except discord.Forbidden: + await self.bot.say("Failed to unmute user. I need the manage roles" + " permission and the user I'm unmuting must be " + "lower than myself in the role hierarchy.") + return + else: + del self._perms_cache[user.id][channel.id] + await asyncio.sleep(0.1) + if user.id in self._perms_cache and not self._perms_cache[user.id]: + del self._perms_cache[user.id] # cleanup + dataIO.save_json("data/mod/perms_cache.json", self._perms_cache) + await self.bot.say("User has been unmuted in this server.") + + @commands.group(pass_context=True) + @checks.mod_or_permissions(manage_messages=True) + async def cleanup(self, ctx): + """Deletes messages.""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + + @cleanup.command(pass_context=True, no_pm=True) + async def text(self, ctx, text: str, number: int): + """Deletes last X messages matching the specified text. 
+ + Example: + cleanup text \"test\" 5 + + Remember to use double quotes.""" + + channel = ctx.message.channel + author = ctx.message.author + server = author.server + is_bot = self.bot.user.bot + has_permissions = channel.permissions_for(server.me).manage_messages + + def check(m): + if text in m.content: + return True + elif m == ctx.message: + return True + else: + return False + + to_delete = [ctx.message] + + if not has_permissions: + await self.bot.say("I'm not allowed to delete messages.") + return + + tries_left = 5 + tmp = ctx.message + + while tries_left and len(to_delete) - 1 < number: + async for message in self.bot.logs_from(channel, limit=100, + before=tmp): + if len(to_delete) - 1 < number and check(message): + to_delete.append(message) + tmp = message + tries_left -= 1 + + logger.info("{}({}) deleted {} messages " + " containing '{}' in channel {}".format(author.name, + author.id, len(to_delete), text, channel.id)) + + if is_bot: + await self.mass_purge(to_delete) + else: + await self.slow_deletion(to_delete) + + @cleanup.command(pass_context=True, no_pm=True) + async def user(self, ctx, user: discord.Member, number: int): + """Deletes last X messages from specified user. + + Examples: + cleanup user @\u200bTwentysix 2 + cleanup user Red 6""" + + channel = ctx.message.channel + author = ctx.message.author + server = author.server + is_bot = self.bot.user.bot + has_permissions = channel.permissions_for(server.me).manage_messages + self_delete = user == self.bot.user + + def check(m): + if m.author == user: + return True + elif m == ctx.message: + return True + else: + return False + + to_delete = [ctx.message] + + if not has_permissions and not self_delete: + await self.bot.say("I'm not allowed to delete messages.") + return + + tries_left = 5 + tmp = ctx.message + + while tries_left and len(to_delete) - 1 < number: + async for message in self.bot.logs_from(channel, limit=100, + before=tmp): + if len(to_delete) - 1 < number and check(message): + to_delete.append(message) + tmp = message + tries_left -= 1 + + logger.info("{}({}) deleted {} messages " + " made by {}({}) in channel {}" + "".format(author.name, author.id, len(to_delete), + user.name, user.id, channel.name)) + + if is_bot and not self_delete: + # For whatever reason the purge endpoint requires manage_messages + await self.mass_purge(to_delete) + else: + await self.slow_deletion(to_delete) + + @cleanup.command(pass_context=True, no_pm=True) + async def after(self, ctx, message_id : int): + """Deletes all messages after specified message + + To get a message id, enable developer mode in Discord's + settings, 'appearance' tab. Then right click a message + and copy its id. + + This command only works on bots running as bot accounts. 
+ """ + + channel = ctx.message.channel + author = ctx.message.author + server = channel.server + is_bot = self.bot.user.bot + has_permissions = channel.permissions_for(server.me).manage_messages + + if not is_bot: + await self.bot.say("This command can only be used on bots with " + "bot accounts.") + return + + to_delete = [] + + after = await self.bot.get_message(channel, message_id) + + if not has_permissions: + await self.bot.say("I'm not allowed to delete messages.") + return + elif not after: + await self.bot.say("Message not found.") + return + + async for message in self.bot.logs_from(channel, limit=2000, + after=after): + to_delete.append(message) + + logger.info("{}({}) deleted {} messages in channel {}" + "".format(author.name, author.id, + len(to_delete), channel.name)) + + await self.mass_purge(to_delete) + + @cleanup.command(pass_context=True, no_pm=True) + async def messages(self, ctx, number: int): + """Deletes last X messages. + + Example: + cleanup messages 26""" + + channel = ctx.message.channel + author = ctx.message.author + server = author.server + is_bot = self.bot.user.bot + has_permissions = channel.permissions_for(server.me).manage_messages + + to_delete = [] + + if not has_permissions: + await self.bot.say("I'm not allowed to delete messages.") + return + + async for message in self.bot.logs_from(channel, limit=number+1): + to_delete.append(message) + + logger.info("{}({}) deleted {} messages in channel {}" + "".format(author.name, author.id, + number, channel.name)) + + if is_bot: + await self.mass_purge(to_delete) + else: + await self.slow_deletion(to_delete) + + @cleanup.command(pass_context=True, no_pm=True, name='bot') + async def cleanup_bot(self, ctx, number: int): + """Cleans up command messages and messages from the bot""" + + channel = ctx.message.channel + author = ctx.message.author + server = channel.server + is_bot = self.bot.user.bot + has_permissions = channel.permissions_for(server.me).manage_messages + + prefixes = self.bot.command_prefix + if isinstance(prefixes, str): + prefixes = [prefixes] + elif callable(prefixes): + if asyncio.iscoroutine(prefixes): + await self.bot.say('Coroutine prefixes not yet implemented.') + return + prefixes = prefixes(self.bot, ctx.message) + + # In case some idiot sets a null prefix + if '' in prefixes: + prefixes.pop('') + + def check(m): + if m.author.id == self.bot.user.id: + return True + elif m == ctx.message: + return True + p = discord.utils.find(m.content.startswith, prefixes) + if p and len(p) > 0: + return m.content[len(p):].startswith(tuple(self.bot.commands)) + return False + + to_delete = [ctx.message] + + if not has_permissions: + await self.bot.say("I'm not allowed to delete messages.") + return + + tries_left = 5 + tmp = ctx.message + + while tries_left and len(to_delete) - 1 < number: + async for message in self.bot.logs_from(channel, limit=100, + before=tmp): + if len(to_delete) - 1 < number and check(message): + to_delete.append(message) + tmp = message + tries_left -= 1 + + logger.info("{}({}) deleted {} " + " command messages in channel {}" + "".format(author.name, author.id, len(to_delete), + channel.name)) + + if is_bot: + await self.mass_purge(to_delete) + else: + await self.slow_deletion(to_delete) + + @cleanup.command(pass_context=True, name='self') + async def cleanup_self(self, ctx, number: int, match_pattern: str = None): + """Cleans up messages owned by the bot. + + By default, all messages are cleaned. 
If a third argument is specified, + it is used for pattern matching: If it begins with r( and ends with ), + then it is interpreted as a regex, and messages that match it are + deleted. Otherwise, it is used in a simple substring test. + + Some helpful regex flags to include in your pattern: + Dots match newlines: (?s); Ignore case: (?i); Both: (?si) + """ + channel = ctx.message.channel + author = ctx.message.author + is_bot = self.bot.user.bot + + # You can always delete your own messages, this is needed to purge + can_mass_purge = False + if type(author) is discord.Member: + me = channel.server.me + can_mass_purge = channel.permissions_for(me).manage_messages + + use_re = (match_pattern and match_pattern.startswith('r(') and + match_pattern.endswith(')')) + + if use_re: + match_pattern = match_pattern[1:] # strip 'r' + match_re = re.compile(match_pattern) + + def content_match(c): + return bool(match_re.match(c)) + elif match_pattern: + def content_match(c): + return match_pattern in c + else: + def content_match(_): + return True + + def check(m): + if m.author.id != self.bot.user.id: + return False + elif content_match(m.content): + return True + return False + + to_delete = [] + # Selfbot convenience, delete trigger message + if author == self.bot.user: + to_delete.append(ctx.message) + number += 1 + + tries_left = 5 + tmp = ctx.message + + while tries_left and len(to_delete) < number: + async for message in self.bot.logs_from(channel, limit=100, + before=tmp): + if len(to_delete) < number and check(message): + to_delete.append(message) + tmp = message + tries_left -= 1 + + if channel.name: + channel_name = 'channel ' + channel.name + else: + channel_name = str(channel) + + logger.info("{}({}) deleted {} messages " + "sent by the bot in {}" + "".format(author.name, author.id, len(to_delete), + channel_name)) + + if is_bot and can_mass_purge: + await self.mass_purge(to_delete) + else: + await self.slow_deletion(to_delete) + + @commands.command(pass_context=True) + @checks.mod_or_permissions(manage_messages=True) + async def reason(self, ctx, case, *, reason : str=""): + """Lets you specify a reason for mod-log's cases + + Defaults to last case assigned to yourself, if available.""" + author = ctx.message.author + server = author.server + try: + case = int(case) + if not reason: + await send_cmd_help(ctx) + return + except: + if reason: + reason = "{} {}".format(case, reason) + else: + reason = case + case = self.last_case[server.id].get(author.id) + if case is None: + await send_cmd_help(ctx) + return + try: + await self.update_case(server, case=case, mod=author, + reason=reason) + except UnauthorizedCaseEdit: + await self.bot.say("That case is not yours.") + except KeyError: + await self.bot.say("That case doesn't exist.") + except NoModLogChannel: + await self.bot.say("There's no mod-log channel set.") + except CaseMessageNotFound: + await self.bot.say("I couldn't find the case's message.") + except NoModLogAccess: + await self.bot.say("I'm not allowed to access the mod-log " + "channel (or its message history)") + else: + await self.bot.say("Case #{} updated.".format(case)) + + @commands.group(pass_context=True, no_pm=True) + @checks.admin_or_permissions(manage_channels=True) + async def ignore(self, ctx): + """Adds servers/channels to ignorelist""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + await self.bot.say(self.count_ignored()) + + @ignore.command(name="channel", pass_context=True) + async def ignore_channel(self, ctx, channel: discord.Channel=None): + 
"""Ignores channel + + Defaults to current one""" + current_ch = ctx.message.channel + if not channel: + if current_ch.id not in self.ignore_list["CHANNELS"]: + self.ignore_list["CHANNELS"].append(current_ch.id) + dataIO.save_json("data/mod/ignorelist.json", self.ignore_list) + await self.bot.say("Channel added to ignore list.") + else: + await self.bot.say("Channel already in ignore list.") + else: + if channel.id not in self.ignore_list["CHANNELS"]: + self.ignore_list["CHANNELS"].append(channel.id) + dataIO.save_json("data/mod/ignorelist.json", self.ignore_list) + await self.bot.say("Channel added to ignore list.") + else: + await self.bot.say("Channel already in ignore list.") + + @ignore.command(name="server", pass_context=True) + async def ignore_server(self, ctx): + """Ignores current server""" + server = ctx.message.server + if server.id not in self.ignore_list["SERVERS"]: + self.ignore_list["SERVERS"].append(server.id) + dataIO.save_json("data/mod/ignorelist.json", self.ignore_list) + await self.bot.say("This server has been added to the ignore list.") + else: + await self.bot.say("This server is already being ignored.") + + @commands.group(pass_context=True, no_pm=True) + @checks.admin_or_permissions(manage_channels=True) + async def unignore(self, ctx): + """Removes servers/channels from ignorelist""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + await self.bot.say(self.count_ignored()) + + @unignore.command(name="channel", pass_context=True) + async def unignore_channel(self, ctx, channel: discord.Channel=None): + """Removes channel from ignore list + + Defaults to current one""" + current_ch = ctx.message.channel + if not channel: + if current_ch.id in self.ignore_list["CHANNELS"]: + self.ignore_list["CHANNELS"].remove(current_ch.id) + dataIO.save_json("data/mod/ignorelist.json", self.ignore_list) + await self.bot.say("This channel has been removed from the ignore list.") + else: + await self.bot.say("This channel is not in the ignore list.") + else: + if channel.id in self.ignore_list["CHANNELS"]: + self.ignore_list["CHANNELS"].remove(channel.id) + dataIO.save_json("data/mod/ignorelist.json", self.ignore_list) + await self.bot.say("Channel removed from ignore list.") + else: + await self.bot.say("That channel is not in the ignore list.") + + @unignore.command(name="server", pass_context=True) + async def unignore_server(self, ctx): + """Removes current server from ignore list""" + server = ctx.message.server + if server.id in self.ignore_list["SERVERS"]: + self.ignore_list["SERVERS"].remove(server.id) + dataIO.save_json("data/mod/ignorelist.json", self.ignore_list) + await self.bot.say("This server has been removed from the ignore list.") + else: + await self.bot.say("This server is not in the ignore list.") + + def count_ignored(self): + msg = "```Currently ignoring:\n" + msg += str(len(self.ignore_list["CHANNELS"])) + " channels\n" + msg += str(len(self.ignore_list["SERVERS"])) + " servers\n```\n" + return msg + + @commands.group(name="filter", pass_context=True, no_pm=True) + @checks.mod_or_permissions(manage_messages=True) + async def _filter(self, ctx): + """Adds/removes words from filter + + Use double quotes to add/remove sentences + Using this command with no subcommands will send + the list of the server's filtered words.""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + server = ctx.message.server + author = ctx.message.author + if server.id in self.filter: + if self.filter[server.id]: + words = ", ".join(self.filter[server.id]) 
+ words = "Filtered in this server:\n\n" + words + try: + for page in pagify(words, delims=[" ", "\n"], shorten_by=8): + await self.bot.send_message(author, page) + except discord.Forbidden: + await self.bot.say("I can't send direct messages to you.") + + @_filter.command(name="add", pass_context=True) + async def filter_add(self, ctx, *words: str): + """Adds words to the filter + + Use double quotes to add sentences + Examples: + filter add word1 word2 word3 + filter add \"This is a sentence\"""" + if words == (): + await send_cmd_help(ctx) + return + server = ctx.message.server + added = 0 + if server.id not in self.filter.keys(): + self.filter[server.id] = [] + for w in words: + if w.lower() not in self.filter[server.id] and w != "": + self.filter[server.id].append(w.lower()) + added += 1 + if added: + dataIO.save_json("data/mod/filter.json", self.filter) + await self.bot.say("Words added to filter.") + else: + await self.bot.say("Words already in the filter.") + + @_filter.command(name="remove", pass_context=True) + async def filter_remove(self, ctx, *words: str): + """Remove words from the filter + + Use double quotes to remove sentences + Examples: + filter remove word1 word2 word3 + filter remove \"This is a sentence\"""" + if words == (): + await send_cmd_help(ctx) + return + server = ctx.message.server + removed = 0 + if server.id not in self.filter.keys(): + await self.bot.say("There are no filtered words in this server.") + return + for w in words: + if w.lower() in self.filter[server.id]: + self.filter[server.id].remove(w.lower()) + removed += 1 + if removed: + dataIO.save_json("data/mod/filter.json", self.filter) + await self.bot.say("Words removed from filter.") + else: + await self.bot.say("Those words weren't in the filter.") + + @commands.group(no_pm=True, pass_context=True) + @checks.admin_or_permissions(manage_roles=True) + async def editrole(self, ctx): + """Edits roles settings""" + if ctx.invoked_subcommand is None: + await send_cmd_help(ctx) + + @editrole.command(aliases=["color"], pass_context=True) + async def colour(self, ctx, role: discord.Role, value: discord.Colour): + """Edits a role's colour + + Use double quotes if the role contains spaces. + Colour must be in hexadecimal format. + \"http://www.w3schools.com/colors/colors_picker.asp\" + Examples: + !editrole colour \"The Transistor\" #ff0000 + !editrole colour Test #ff9900""" + author = ctx.message.author + try: + await self.bot.edit_role(ctx.message.server, role, color=value) + logger.info("{}({}) changed the colour of role '{}'".format( + author.name, author.id, role.name)) + await self.bot.say("Done.") + except discord.Forbidden: + await self.bot.say("I need permissions to manage roles first.") + except Exception as e: + print(e) + await self.bot.say("Something went wrong.") + + @editrole.command(name="name", pass_context=True) + @checks.admin_or_permissions(administrator=True) + async def edit_role_name(self, ctx, role: discord.Role, name: str): + """Edits a role's name + + Use double quotes if the role or the name contain spaces. + Examples: + !editrole name \"The Transistor\" Test""" + if name == "": + await self.bot.say("Name cannot be empty.") + return + try: + author = ctx.message.author + old_name = role.name # probably not necessary? 
+ await self.bot.edit_role(ctx.message.server, role, name=name) + logger.info("{}({}) changed the name of role '{}' to '{}'".format( + author.name, author.id, old_name, name)) + await self.bot.say("Done.") + except discord.Forbidden: + await self.bot.say("I need permissions to manage roles first.") + except Exception as e: + print(e) + await self.bot.say("Something went wrong.") + + @commands.command() + async def names(self, user : discord.Member): + """Show previous names/nicknames of a user""" + server = user.server + names = self.past_names[user.id] if user.id in self.past_names else None + try: + nicks = self.past_nicknames[server.id][user.id] + nicks = [escape_mass_mentions(nick) for nick in nicks] + except: + nicks = None + msg = "" + if names: + names = [escape_mass_mentions(name) for name in names] + msg += "**Past 20 names**:\n" + msg += ", ".join(names) + if nicks: + if msg: + msg += "\n\n" + msg += "**Past 20 nicknames**:\n" + msg += ", ".join(nicks) + if msg: + await self.bot.say(msg) + else: + await self.bot.say("That user doesn't have any recorded name or " + "nickname change.") + + async def mass_purge(self, messages): + while messages: + if len(messages) > 1: + await self.bot.delete_messages(messages[:100]) + messages = messages[100:] + else: + await self.bot.delete_message(messages[0]) + messages = [] + await asyncio.sleep(1.5) + + async def slow_deletion(self, messages): + for message in messages: + try: + await self.bot.delete_message(message) + except: + pass + + def is_admin_or_superior(self, obj): + if isinstance(obj, discord.Message): + user = obj.author + elif isinstance(obj, discord.Member): + user = obj + elif isinstance(obj, discord.Role): + pass + else: + raise TypeError('Only messages, members or roles may be passed') + + server = obj.server + admin_role = settings.get_server_admin(server) + + if isinstance(obj, discord.Role): + return obj.name == admin_role + + if user.id == settings.owner: + return True + elif discord.utils.get(user.roles, name=admin_role): + return True + else: + return False + + def is_mod_or_superior(self, obj): + if isinstance(obj, discord.Message): + user = obj.author + elif isinstance(obj, discord.Member): + user = obj + elif isinstance(obj, discord.Role): + pass + else: + raise TypeError('Only messages, members or roles may be passed') + + server = obj.server + admin_role = settings.get_server_admin(server) + mod_role = settings.get_server_mod(server) + + if isinstance(obj, discord.Role): + return obj.name in [admin_role, mod_role] + + if user.id == settings.owner: + return True + elif discord.utils.get(user.roles, name=admin_role): + return True + elif discord.utils.get(user.roles, name=mod_role): + return True + else: + return False + + def is_allowed_by_hierarchy(self, server, mod, user): + toggled = self.settings[server.id].get("respect_hierarchy", + default_settings["respect_hierarchy"]) + is_special = mod == server.owner or mod.id == self.bot.settings.owner + + if not toggled: + return True + else: + return mod.top_role.position > user.top_role.position or is_special + + async def new_case(self, server, *, action, mod=None, user, reason=None, until=None, channel=None): + action_type = action.lower() + "_cases" + if not self.settings[server.id].get(action_type, default_settings[action_type]): + return + + mod_channel = server.get_channel(self.settings[server.id]["mod-log"]) + if mod_channel is None: + return + + if server.id not in self.cases: + self.cases[server.id] = {} + + case_n = len(self.cases[server.id]) + 1 + + case = { + 
"case" : case_n, + "created" : datetime.utcnow().timestamp(), + "modified" : None, + "action" : action, + "channel" : channel.id if channel else None, + "user" : str(user), + "user_id" : user.id, + "reason" : reason, + "moderator" : str(mod) if mod is not None else None, + "moderator_id" : mod.id if mod is not None else None, + "amended_by" : None, + "amended_id" : None, + "message" : None, + "until" : None, + } + + case_msg = self.format_case_msg(case) + + try: + msg = await self.bot.send_message(mod_channel, case_msg) + case["message"] = msg.id + except: + pass + + self.cases[server.id][str(case_n)] = case + + if mod: + self.last_case[server.id][mod.id] = case_n + + dataIO.save_json("data/mod/modlog.json", self.cases) + + async def update_case(self, server, *, case, mod=None, reason=None, + until=False): + channel = server.get_channel(self.settings[server.id]["mod-log"]) + if channel is None: + raise NoModLogChannel() + + case = str(case) + case = self.cases[server.id][case] + + if case["moderator_id"] is not None: + if case["moderator_id"] != mod.id: + if self.is_admin_or_superior(mod): + case["amended_by"] = str(mod) + case["amended_id"] = mod.id + else: + raise UnauthorizedCaseEdit() + else: + case["moderator"] = str(mod) + case["moderator_id"] = mod.id + + if case["reason"]: # Existing reason + case["modified"] = datetime.utcnow().timestamp() + case["reason"] = reason + + if until is not False: + case["until"] = until + + case_msg = self.format_case_msg(case) + + dataIO.save_json("data/mod/modlog.json", self.cases) + + if case["message"] is None: # The case's message was never sent + raise CaseMessageNotFound() + + try: + msg = await self.bot.get_message(channel, case["message"]) + except discord.NotFound: + raise CaseMessageNotFound() + except discord.Forbidden: + raise NoModLogAccess() + else: + await self.bot.edit_message(msg, case_msg) + + + def format_case_msg(self, case): + tmp = case.copy() + if case["reason"] is None: + tmp["reason"] = "Type [p]reason %i to add it" % tmp["case"] + if case["moderator"] is None: + tmp["moderator"] = "Unknown" + tmp["moderator_id"] = "Nobody has claimed responsibility yet" + if case["action"] in ACTIONS_REPR: + tmp["action"] = ' '.join(ACTIONS_REPR[tmp["action"]]) + + channel = case.get("channel") + if channel: + channel = self.bot.get_channel(channel) + tmp["action"] += ' in ' + channel.mention + + case_msg = ( + "**Case #{case}** | {action}\n" + "**User:** {user} ({user_id})\n" + "**Moderator:** {moderator} ({moderator_id})\n" + ).format(**tmp) + + created = case.get('created') + until = case.get('until') + if created and until: + start = datetime.fromtimestamp(created) + end = datetime.fromtimestamp(until) + end_fmt = end.strftime('%Y-%m-%d %H:%M:%S UTC') + duration = end - start + dur_fmt = strfdelta(duration) + case_msg += ("**Until:** {}\n" + "**Duration:** {}\n").format(end_fmt, dur_fmt) + + amended = case.get('amended_by') + if amended: + amended_id = case.get('amended_id') + case_msg += "**Amended by:** %s (%s)\n" % (amended, amended_id) + + modified = case.get('modified') + if modified: + modified = datetime.fromtimestamp(modified) + modified_fmt = modified.strftime('%Y-%m-%d %H:%M:%S UTC') + case_msg += "**Last modified:** %s\n" % modified_fmt + + case_msg += "**Reason:** %s\n" % tmp["reason"] + + return case_msg + + async def check_filter(self, message): + server = message.server + if server.id in self.filter.keys(): + for w in self.filter[server.id]: + if w in message.content.lower(): + try: + await self.bot.delete_message(message) 
+ logger.info("Message deleted in server {}." + "Filtered: {}" + "".format(server.id, w)) + return True + except: + pass + return False + + async def check_duplicates(self, message): + server = message.server + author = message.author + if server.id not in self.settings: + return False + if self.settings[server.id]["delete_repeats"]: + if not message.content: + return False + self.cache[author].append(message) + msgs = self.cache[author] + if len(msgs) == 3 and \ + msgs[0].content == msgs[1].content == msgs[2].content: + try: + await self.bot.delete_message(message) + return True + except: + pass + return False + + async def check_mention_spam(self, message): + server = message.server + author = message.author + if server.id not in self.settings: + return False + if self.settings[server.id]["ban_mention_spam"]: + max_mentions = self.settings[server.id]["ban_mention_spam"] + mentions = set(message.mentions) + if len(mentions) >= max_mentions: + try: + self.temp_cache.add(author, server, "BAN") + await self.bot.ban(author, 1) + except: + logger.info("Failed to ban member for mention spam in " + "server {}".format(server.id)) + else: + await self.new_case(server, + action="BAN", + mod=server.me, + user=author, + reason="Mention spam (Autoban)") + return True + return False + + async def on_command(self, command, ctx): + """Currently used for: + * delete delay""" + server = ctx.message.server + message = ctx.message + try: + delay = self.settings[server.id]["delete_delay"] + except KeyError: + # We have no delay set + return + except AttributeError: + # DM + return + + if delay == -1: + return + + async def _delete_helper(bot, message): + try: + await bot.delete_message(message) + logger.debug("Deleted command msg {}".format(message.id)) + except: + pass # We don't really care if it fails or not + + await asyncio.sleep(delay) + await _delete_helper(self.bot, message) + + async def on_message(self, message): + author = message.author + if message.server is None or self.bot.user == author: + return + + valid_user = isinstance(author, discord.Member) and not author.bot + + # Bots and mods or superior are ignored from the filter + if not valid_user or self.is_mod_or_superior(message): + return + + deleted = await self.check_filter(message) + if not deleted: + deleted = await self.check_duplicates(message) + if not deleted: + deleted = await self.check_mention_spam(message) + + async def on_message_edit(self, _, message): + author = message.author + if message.server is None or self.bot.user == author: + return + + valid_user = isinstance(author, discord.Member) and not author.bot + + if not valid_user or self.is_mod_or_superior(message): + return + + await self.check_filter(message) + + async def on_member_ban(self, member): + server = member.server + if not self.temp_cache.check(member, server, "BAN"): + await self.new_case(server, + user=member, + action="BAN") + + async def on_member_unban(self, server, user): + if not self.temp_cache.check(user, server, "UNBAN"): + await self.new_case(server, + user=user, + action="UNBAN") + + async def check_names(self, before, after): + if before.name != after.name: + if before.id not in self.past_names: + self.past_names[before.id] = [after.name] + else: + if after.name not in self.past_names[before.id]: + names = deque(self.past_names[before.id], maxlen=20) + names.append(after.name) + self.past_names[before.id] = list(names) + dataIO.save_json("data/mod/past_names.json", self.past_names) + + if before.nick != after.nick and after.nick is not None: + 
server = before.server + if server.id not in self.past_nicknames: + self.past_nicknames[server.id] = {} + if before.id in self.past_nicknames[server.id]: + nicks = deque(self.past_nicknames[server.id][before.id], + maxlen=20) + else: + nicks = [] + if after.nick not in nicks: + nicks.append(after.nick) + self.past_nicknames[server.id][before.id] = list(nicks) + dataIO.save_json("data/mod/past_nicknames.json", + self.past_nicknames) + + def are_overwrites_empty(self, overwrites): + """There is currently no cleaner way to check if a + PermissionOverwrite object is empty""" + original = [p for p in iter(overwrites)] + empty = [p for p in iter(discord.PermissionOverwrite())] + return original == empty + + +def strfdelta(delta): + s = [] + if delta.days: + ds = '%i day' % delta.days + if delta.days > 1: + ds += 's' + s.append(ds) + hrs, rem = divmod(delta.seconds, 60*60) + if hrs: + hs = '%i hr' % hrs + if hrs > 1: + hs += 's' + s.append(hs) + mins, secs = divmod(rem, 60) + if mins: + s.append('%i min' % mins) + if secs: + s.append('%i sec' % secs) + return ' '.join(s) + + +def check_folders(): + folders = ("data", "data/mod/") + for folder in folders: + if not os.path.exists(folder): + print("Creating " + folder + " folder...") + os.makedirs(folder) + + +def check_files(): + ignore_list = {"SERVERS": [], "CHANNELS": []} + + files = { + "ignorelist.json" : ignore_list, + "filter.json" : {}, + "past_names.json" : {}, + "past_nicknames.json" : {}, + "settings.json" : {}, + "modlog.json" : {}, + "perms_cache.json" : {} + } + + for filename, value in files.items(): + if not os.path.isfile("data/mod/{}".format(filename)): + print("Creating empty {}".format(filename)) + dataIO.save_json("data/mod/{}".format(filename), value) + + +def setup(bot): + global logger + check_folders() + check_files() + logger = logging.getLogger("mod") + # Prevents the logger from being loaded again in case of module reload + if logger.level == 0: + logger.setLevel(logging.INFO) + handler = logging.FileHandler( + filename='data/mod/mod.log', encoding='utf-8', mode='a') + handler.setFormatter( + logging.Formatter('%(asctime)s %(message)s', datefmt="[%d/%m/%Y %H:%M]")) + logger.addHandler(handler) + n = Mod(bot) + bot.add_listener(n.check_names, "on_member_update") + bot.add_cog(n) diff --git a/RBXLegacyDiscordBot/cogs/owner.py b/RBXLegacyDiscordBot/cogs/owner.py new file mode 100644 index 0000000..f3c74c7 --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/owner.py @@ -0,0 +1,1095 @@ +import discord +from discord.ext import commands +from cogs.utils import checks +from __main__ import set_cog +from .utils.dataIO import dataIO +from .utils.chat_formatting import pagify, box + +import importlib +import traceback +import logging +import asyncio +import threading +import datetime +import glob +import os +import aiohttp + +log = logging.getLogger("red.owner") + + +class CogNotFoundError(Exception): + pass + + +class CogLoadError(Exception): + pass + + +class NoSetupError(CogLoadError): + pass + + +class CogUnloadError(Exception): + pass + + +class OwnerUnloadWithoutReloadError(CogUnloadError): + pass + + +class Owner: + """All owner-only commands that relate to debug bot operations.""" + + def __init__(self, bot): + self.bot = bot + self.setowner_lock = False + self.disabled_commands = dataIO.load_json("data/red/disabled_commands.json") + self.global_ignores = dataIO.load_json("data/red/global_ignores.json") + self.session = aiohttp.ClientSession(loop=self.bot.loop) + + def __unload(self): + self.session.close() + + @commands.command() 
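+ # The cog (un)load commands below all follow the same flow: resolve the
+ # name to "cogs.<name>", import or unload it through the _load_cog /
+ # _unload_cog helpers further down (importlib + bot.load_extension), and
+ # persist the autoload preference with set_cog().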
+ @checks.is_owner() + async def load(self, *, cog_name: str): + """Loads a cog + + Example: load mod""" + module = cog_name.strip() + if "cogs." not in module: + module = "cogs." + module + try: + self._load_cog(module) + except CogNotFoundError: + await self.bot.say("That cog could not be found.") + except CogLoadError as e: + log.exception(e) + traceback.print_exc() + await self.bot.say("There was an issue loading the cog. Check" + " your console or logs for more information.") + except Exception as e: + log.exception(e) + traceback.print_exc() + await self.bot.say('Cog was found and possibly loaded but ' + 'something went wrong. Check your console ' + 'or logs for more information.') + else: + set_cog(module, True) + await self.disable_commands() + await self.bot.say("The cog has been loaded.") + + @commands.group(invoke_without_command=True) + @checks.is_owner() + async def unload(self, *, cog_name: str): + """Unloads a cog + + Example: unload mod""" + module = cog_name.strip() + if "cogs." not in module: + module = "cogs." + module + if not self._does_cogfile_exist(module): + await self.bot.say("That cog file doesn't exist. I will not" + " turn off autoloading at start just in case" + " this isn't supposed to happen.") + else: + set_cog(module, False) + try: # No matter what we should try to unload it + self._unload_cog(module) + except OwnerUnloadWithoutReloadError: + await self.bot.say("I cannot allow you to unload the Owner plugin" + " unless you are in the process of reloading.") + except CogUnloadError as e: + log.exception(e) + traceback.print_exc() + await self.bot.say('Unable to safely unload that cog.') + else: + await self.bot.say("The cog has been unloaded.") + + @unload.command(name="all") + @checks.is_owner() + async def unload_all(self): + """Unloads all cogs""" + cogs = self._list_cogs() + still_loaded = [] + for cog in cogs: + set_cog(cog, False) + try: + self._unload_cog(cog) + except OwnerUnloadWithoutReloadError: + pass + except CogUnloadError as e: + log.exception(e) + traceback.print_exc() + still_loaded.append(cog) + if still_loaded: + still_loaded = ", ".join(still_loaded) + await self.bot.say("I was unable to unload some cogs: " + "{}".format(still_loaded)) + else: + await self.bot.say("All cogs are now unloaded.") + + @checks.is_owner() + @commands.command(name="reload") + async def _reload(self, *, cog_name: str): + """Reloads a cog + + Example: reload audio""" + module = cog_name.strip() + if "cogs." not in module: + module = "cogs." + module + + try: + self._unload_cog(module, reloading=True) + except: + pass + + try: + self._load_cog(module) + except CogNotFoundError: + await self.bot.say("That cog cannot be found.") + except NoSetupError: + await self.bot.say("That cog does not have a setup function.") + except CogLoadError as e: + log.exception(e) + traceback.print_exc() + await self.bot.say("That cog could not be loaded. Check your" + " console or logs for more information.") + else: + set_cog(module, True) + await self.disable_commands() + await self.bot.say("The cog has been reloaded.") + + @commands.command(name="cogs") + @checks.is_owner() + async def _show_cogs(self): + """Shows loaded/unloaded cogs""" + # This function assumes that all cogs are in the cogs folder, + # which is currently true. 
+ + # Extracting filename from __module__ Example: cogs.owner + loaded = [c.__module__.split(".")[1] for c in self.bot.cogs.values()] + # What's in the folder but not loaded is unloaded + unloaded = [c.split(".")[1] for c in self._list_cogs() + if c.split(".")[1] not in loaded] + + if not unloaded: + unloaded = ["None"] + + msg = ("+ Loaded\n" + "{}\n\n" + "- Unloaded\n" + "{}" + "".format(", ".join(sorted(loaded)), + ", ".join(sorted(unloaded))) + ) + for page in pagify(msg, [" "], shorten_by=16): + await self.bot.say(box(page.lstrip(" "), lang="diff")) + + @commands.command(pass_context=True, hidden=True) + @checks.is_owner() + async def debug(self, ctx, *, code): + """Evaluates code""" + def check(m): + if m.content.strip().lower() == "more": + return True + + author = ctx.message.author + channel = ctx.message.channel + + code = code.strip('` ') + result = None + + global_vars = globals().copy() + global_vars['bot'] = self.bot + global_vars['ctx'] = ctx + global_vars['message'] = ctx.message + global_vars['author'] = ctx.message.author + global_vars['channel'] = ctx.message.channel + global_vars['server'] = ctx.message.server + + try: + result = eval(code, global_vars, locals()) + except Exception as e: + await self.bot.say(box('{}: {}'.format(type(e).__name__, str(e)), + lang="py")) + return + + if asyncio.iscoroutine(result): + result = await result + + result = str(result) + + if not ctx.message.channel.is_private: + censor = (self.bot.settings.email, + self.bot.settings.password, + self.bot.settings.token) + r = "[EXPUNGED]" + for w in censor: + if w is None or w == "": + continue + result = result.replace(w, r) + result = result.replace(w.lower(), r) + result = result.replace(w.upper(), r) + + result = list(pagify(result, shorten_by=16)) + + for i, page in enumerate(result): + if i != 0 and i % 4 == 0: + last = await self.bot.say("There are still {} messages. " + "Type `more` to continue." + "".format(len(result) - (i+1))) + msg = await self.bot.wait_for_message(author=author, + channel=channel, + check=check, + timeout=10) + if msg is None: + try: + await self.bot.delete_message(last) + except: + pass + finally: + break + await self.bot.say(box(page, lang="py")) + + @commands.group(name="set", pass_context=True) + async def _set(self, ctx): + """Changes Red's core settings""" + if ctx.invoked_subcommand is None: + await self.bot.send_cmd_help(ctx) + return + + @_set.command(pass_context=True) + async def owner(self, ctx): + """Sets owner""" + if self.bot.settings.no_prompt is True: + await self.bot.say("Console interaction is disabled. Start Red " + "without the `--no-prompt` flag to use this " + "command.") + return + if self.setowner_lock: + await self.bot.say("A set owner command is already pending.") + return + + if self.bot.settings.owner is not None: + await self.bot.say( + "The owner is already set. Remember that setting the owner " + "to someone else other than who hosts the bot has security " + "repercussions and is *NOT recommended*. Proceed at your own risk." 
+ ) + await asyncio.sleep(3) + + await self.bot.say("Confirm in the console that you're the owner.") + self.setowner_lock = True + t = threading.Thread(target=self._wait_for_answer, + args=(ctx.message.author,)) + t.start() + + @_set.command() + @checks.is_owner() + async def defaultmodrole(self, *, role_name: str): + """Sets the default mod role name + + This is used if a server-specific role is not set""" + self.bot.settings.default_mod = role_name + self.bot.settings.save_settings() + await self.bot.say("The default mod role name has been set.") + + @_set.command() + @checks.is_owner() + async def defaultadminrole(self, *, role_name: str): + """Sets the default admin role name + + This is used if a server-specific role is not set""" + self.bot.settings.default_admin = role_name + self.bot.settings.save_settings() + await self.bot.say("The default admin role name has been set.") + + @_set.command(pass_context=True) + @checks.is_owner() + async def prefix(self, ctx, *prefixes): + """Sets Red's global prefixes + + Accepts multiple prefixes separated by a space. Enclose in double + quotes if a prefix contains spaces. + Example: set prefix ! $ ? "two words" """ + if prefixes == (): + await self.bot.send_cmd_help(ctx) + return + + self.bot.settings.prefixes = sorted(prefixes, reverse=True) + self.bot.settings.save_settings() + log.debug("Setting global prefixes to:\n\t{}" + "".format(self.bot.settings.prefixes)) + + p = "prefixes" if len(prefixes) > 1 else "prefix" + await self.bot.say("Global {} set".format(p)) + + @_set.command(pass_context=True, no_pm=True) + @checks.serverowner_or_permissions(administrator=True) + async def serverprefix(self, ctx, *prefixes): + """Sets Red's prefixes for this server + + Accepts multiple prefixes separated by a space. Enclose in double + quotes if a prefix contains spaces. + Example: set serverprefix ! $ ? "two words" + + Issuing this command with no parameters will reset the server + prefixes and the global ones will be used instead.""" + server = ctx.message.server + + if prefixes == (): + self.bot.settings.set_server_prefixes(server, []) + self.bot.settings.save_settings() + current_p = ", ".join(self.bot.settings.prefixes) + await self.bot.say("Server prefixes reset. Current prefixes: " + "`{}`".format(current_p)) + return + + prefixes = sorted(prefixes, reverse=True) + self.bot.settings.set_server_prefixes(server, prefixes) + self.bot.settings.save_settings() + log.debug("Setting server's {} prefixes to:\n\t{}" + "".format(server.id, self.bot.settings.prefixes)) + + p = "Prefixes" if len(prefixes) > 1 else "Prefix" + await self.bot.say("{} set for this server.\n" + "To go back to the global prefixes, do" + " `{}set serverprefix` " + "".format(p, prefixes[0])) + + @_set.command(pass_context=True) + @checks.is_owner() + async def name(self, ctx, *, name): + """Sets Red's name""" + name = name.strip() + if name != "": + try: + await self.bot.edit_profile(self.bot.settings.password, + username=name) + except: + await self.bot.say("Failed to change name. Remember that you" + " can only do it up to 2 times an hour." + "Use nicknames if you need frequent " + "changes. 
{}set nickname" + "".format(ctx.prefix)) + else: + await self.bot.say("Done.") + else: + await self.bot.send_cmd_help(ctx) + + @_set.command(pass_context=True, no_pm=True) + @checks.is_owner() + async def nickname(self, ctx, *, nickname=""): + """Sets Red's nickname + + Leaving this empty will remove it.""" + nickname = nickname.strip() + if nickname == "": + nickname = None + try: + await self.bot.change_nickname(ctx.message.server.me, nickname) + await self.bot.say("Done.") + except discord.Forbidden: + await self.bot.say("I cannot do that, I lack the " + "\"Change Nickname\" permission.") + + @_set.command(pass_context=True) + @checks.is_owner() + async def game(self, ctx, *, game=None): + """Sets Red's playing status + + Leaving this empty will clear it.""" + + server = ctx.message.server + + current_status = server.me.status if server is not None else None + + if game: + game = game.strip() + await self.bot.change_presence(game=discord.Game(name=game), + status=current_status) + log.debug('Status set to "{}" by owner'.format(game)) + else: + await self.bot.change_presence(game=None, status=current_status) + log.debug('status cleared by owner') + await self.bot.say("Done.") + + @_set.command(pass_context=True) + @checks.is_owner() + async def status(self, ctx, *, status=None): + """Sets Red's status + + Statuses: + online + idle + dnd + invisible""" + + statuses = { + "online" : discord.Status.online, + "idle" : discord.Status.idle, + "dnd" : discord.Status.dnd, + "invisible" : discord.Status.invisible + } + + server = ctx.message.server + + current_game = server.me.game if server is not None else None + + if status is None: + await self.bot.change_presence(status=discord.Status.online, + game=current_game) + await self.bot.say("Status reset.") + else: + status = statuses.get(status.lower(), None) + if status: + await self.bot.change_presence(status=status, + game=current_game) + await self.bot.say("Status changed.") + else: + await self.bot.send_cmd_help(ctx) + + @_set.command(pass_context=True) + @checks.is_owner() + async def stream(self, ctx, streamer=None, *, stream_title=None): + """Sets Red's streaming status + + Leaving both streamer and stream_title empty will clear it.""" + + server = ctx.message.server + + current_status = server.me.status if server is not None else None + + if stream_title: + stream_title = stream_title.strip() + if "twitch.tv/" not in streamer: + streamer = "https://www.twitch.tv/" + streamer + game = discord.Game(type=1, url=streamer, name=stream_title) + await self.bot.change_presence(game=game, status=current_status) + log.debug('Owner has set streaming status and url to "{}" and {}'.format(stream_title, streamer)) + elif streamer is not None: + await self.bot.send_cmd_help(ctx) + return + else: + await self.bot.change_presence(game=None, status=current_status) + log.debug('stream cleared by owner') + await self.bot.say("Done.") + + @_set.command() + @checks.is_owner() + async def avatar(self, url): + """Sets Red's avatar""" + try: + async with self.session.get(url) as r: + data = await r.read() + await self.bot.edit_profile(self.bot.settings.password, avatar=data) + await self.bot.say("Done.") + log.debug("changed avatar") + except Exception as e: + await self.bot.say("Error, check your console or logs for " + "more information.") + log.exception(e) + traceback.print_exc() + + @_set.command(name="token") + @checks.is_owner() + async def _token(self, token): + """Sets Red's login token""" + if len(token) < 50: + await self.bot.say("Invalid token.") + 
else: + self.bot.settings.token = token + self.bot.settings.save_settings() + await self.bot.say("Token set. Restart me.") + log.debug("Token changed.") + + @_set.command(name="adminrole", pass_context=True, no_pm=True) + @checks.serverowner() + async def _server_adminrole(self, ctx, *, role: discord.Role): + """Sets the admin role for this server""" + server = ctx.message.server + if server.id not in self.bot.settings.servers: + await self.bot.say("Remember to set modrole too.") + self.bot.settings.set_server_admin(server, role.name) + await self.bot.say("Admin role set to '{}'".format(role.name)) + + @_set.command(name="modrole", pass_context=True, no_pm=True) + @checks.serverowner() + async def _server_modrole(self, ctx, *, role: discord.Role): + """Sets the mod role for this server""" + server = ctx.message.server + if server.id not in self.bot.settings.servers: + await self.bot.say("Remember to set adminrole too.") + self.bot.settings.set_server_mod(server, role.name) + await self.bot.say("Mod role set to '{}'".format(role.name)) + + @commands.group(pass_context=True) + @checks.is_owner() + async def blacklist(self, ctx): + """Blacklist management commands + + Blacklisted users will be unable to issue commands""" + if ctx.invoked_subcommand is None: + await self.bot.send_cmd_help(ctx) + + @blacklist.command(name="add") + async def _blacklist_add(self, user: discord.Member): + """Adds user to Red's global blacklist""" + if user.id not in self.global_ignores["blacklist"]: + self.global_ignores["blacklist"].append(user.id) + self.save_global_ignores() + await self.bot.say("User has been blacklisted.") + else: + await self.bot.say("User is already blacklisted.") + + @blacklist.command(name="remove") + async def _blacklist_remove(self, user: discord.Member): + """Removes user from Red's global blacklist""" + if user.id in self.global_ignores["blacklist"]: + self.global_ignores["blacklist"].remove(user.id) + self.save_global_ignores() + await self.bot.say("User has been removed from the blacklist.") + else: + await self.bot.say("User is not blacklisted.") + + @blacklist.command(name="list") + async def _blacklist_list(self): + """Lists users on the blacklist""" + blacklist = self._populate_list(self.global_ignores["blacklist"]) + + if blacklist: + for page in blacklist: + await self.bot.say(box(page)) + else: + await self.bot.say("The blacklist is empty.") + + @blacklist.command(name="clear") + async def _blacklist_clear(self): + """Clears the global blacklist""" + self.global_ignores["blacklist"] = [] + self.save_global_ignores() + await self.bot.say("Blacklist is now empty.") + + @commands.group(pass_context=True) + @checks.is_owner() + async def whitelist(self, ctx): + """Whitelist management commands + + If the whitelist is not empty, only whitelisted users will + be able to use Red""" + if ctx.invoked_subcommand is None: + await self.bot.send_cmd_help(ctx) + + @whitelist.command(name="add") + async def _whitelist_add(self, user: discord.Member): + """Adds user to Red's global whitelist""" + if user.id not in self.global_ignores["whitelist"]: + if not self.global_ignores["whitelist"]: + msg = "\nNon-whitelisted users will be ignored." + else: + msg = "" + self.global_ignores["whitelist"].append(user.id) + self.save_global_ignores() + await self.bot.say("User has been whitelisted." 
+ msg) + else: + await self.bot.say("User is already whitelisted.") + + @whitelist.command(name="remove") + async def _whitelist_remove(self, user: discord.Member): + """Removes user from Red's global whitelist""" + if user.id in self.global_ignores["whitelist"]: + self.global_ignores["whitelist"].remove(user.id) + self.save_global_ignores() + await self.bot.say("User has been removed from the whitelist.") + else: + await self.bot.say("User is not whitelisted.") + + @whitelist.command(name="list") + async def _whitelist_list(self): + """Lists users on the whitelist""" + whitelist = self._populate_list(self.global_ignores["whitelist"]) + + if whitelist: + for page in whitelist: + await self.bot.say(box(page)) + else: + await self.bot.say("The whitelist is empty.") + + @whitelist.command(name="clear") + async def _whitelist_clear(self): + """Clears the global whitelist""" + self.global_ignores["whitelist"] = [] + self.save_global_ignores() + await self.bot.say("Whitelist is now empty.") + + @commands.command() + @checks.is_owner() + async def shutdown(self, silently : bool=False): + """Shuts down Red""" + wave = "\N{WAVING HAND SIGN}" + skin = "\N{EMOJI MODIFIER FITZPATRICK TYPE-3}" + try: # We don't want missing perms to stop our shutdown + if not silently: + await self.bot.say("Shutting down... " + wave + skin) + except: + pass + await self.bot.shutdown() + + @commands.command() + @checks.is_owner() + async def restart(self, silently : bool=False): + """Attempts to restart Red + + Makes Red quit with exit code 26 + The restart is not guaranteed: it must be dealt + with by the process manager in use""" + try: + if not silently: + await self.bot.say("Restarting...") + except: + pass + await self.bot.shutdown(restart=True) + + @commands.group(name="command", pass_context=True) + @checks.is_owner() + async def command_disabler(self, ctx): + """Disables/enables commands + + With no subcommands returns the disabled commands list""" + if ctx.invoked_subcommand is None: + await self.bot.send_cmd_help(ctx) + if self.disabled_commands: + msg = "Disabled commands:\n```xl\n" + for cmd in self.disabled_commands: + msg += "{}, ".format(cmd) + msg = msg.strip(", ") + await self.bot.whisper("{}```".format(msg)) + + @command_disabler.command() + async def disable(self, *, command): + """Disables commands/subcommands""" + comm_obj = await self.get_command(command) + if comm_obj is KeyError: + await self.bot.say("That command doesn't seem to exist.") + elif comm_obj is False: + await self.bot.say("You cannot disable owner restricted commands.") + else: + comm_obj.enabled = False + comm_obj.hidden = True + self.disabled_commands.append(command) + self.save_disabled_commands() + await self.bot.say("Command has been disabled.") + + @command_disabler.command() + async def enable(self, *, command): + """Enables commands/subcommands""" + if command in self.disabled_commands: + self.disabled_commands.remove(command) + self.save_disabled_commands() + await self.bot.say("Command enabled.") + else: + await self.bot.say("That command is not disabled.") + return + try: + comm_obj = await self.get_command(command) + comm_obj.enabled = True + comm_obj.hidden = False + except: # In case it was in the disabled list but not currently loaded + pass # No point in even checking what returns + + async def get_command(self, command): + command = command.split() + try: + comm_obj = self.bot.commands[command[0]] + if len(command) > 1: + command.pop(0) + for cmd in command: + comm_obj = comm_obj.commands[cmd] + except KeyError: + 
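+ # Note: the KeyError class itself is returned (not raised) as a sentinel;
+ # callers such as [p]command disable check `comm_obj is KeyError` for
+ # "command not found" and `comm_obj is False` for owner-restricted commands.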
return KeyError + for check in comm_obj.checks: + if hasattr(check, "__name__") and check.__name__ == "is_owner_check": + return False + return comm_obj + + async def disable_commands(self): # runs at boot + for cmd in self.disabled_commands: + cmd_obj = await self.get_command(cmd) + try: + cmd_obj.enabled = False + cmd_obj.hidden = True + except: + pass + + @commands.command() + @checks.is_owner() + async def join(self): + """Shows Red's invite URL""" + if self.bot.user.bot: + await self.bot.whisper("Invite URL: " + self.bot.oauth_url) + else: + await self.bot.say("I'm not a bot account. I have no invite URL.") + + @commands.command(pass_context=True, no_pm=True) + @checks.is_owner() + async def leave(self, ctx): + """Leaves server""" + message = ctx.message + + await self.bot.say("Are you sure you want me to leave this server?" + " Type yes to confirm.") + response = await self.bot.wait_for_message(author=message.author) + + if response.content.lower().strip() == "yes": + await self.bot.say("Alright. Bye :wave:") + log.debug('Leaving "{}"'.format(message.server.name)) + await self.bot.leave_server(message.server) + else: + await self.bot.say("Ok I'll stay here then.") + + @commands.command(pass_context=True) + @checks.is_owner() + async def servers(self, ctx): + """Lists and allows to leave servers""" + owner = ctx.message.author + servers = sorted(list(self.bot.servers), + key=lambda s: s.name.lower()) + msg = "" + for i, server in enumerate(servers): + msg += "{}: {}\n".format(i, server.name) + msg += "\nTo leave a server just type its number." + + for page in pagify(msg, ['\n']): + await self.bot.say(page) + + while msg is not None: + msg = await self.bot.wait_for_message(author=owner, timeout=15) + try: + msg = int(msg.content) + await self.leave_confirmation(servers[msg], owner, ctx) + break + except (IndexError, ValueError, AttributeError): + pass + + async def leave_confirmation(self, server, owner, ctx): + await self.bot.say("Are you sure you want me " + "to leave {}? (yes/no)".format(server.name)) + + msg = await self.bot.wait_for_message(author=owner, timeout=15) + + if msg is None: + await self.bot.say("I guess not.") + elif msg.content.lower().strip() in ("yes", "y"): + await self.bot.leave_server(server) + if server != ctx.message.server: + await self.bot.say("Done.") + else: + await self.bot.say("Alright then.") + + @commands.command(pass_context=True) + @commands.cooldown(1, 60, commands.BucketType.user) + async def contact(self, ctx, *, message : str): + """Sends a message to the owner""" + if self.bot.settings.owner is None: + await self.bot.say("I have no owner set.") + return + server = ctx.message.server + owner = discord.utils.get(self.bot.get_all_members(), + id=self.bot.settings.owner) + author = ctx.message.author + footer = "User ID: " + author.id + + if ctx.message.server is None: + source = "through DM" + else: + source = "from {}".format(server) + footer += " | Server ID: " + server.id + + if isinstance(author, discord.Member): + colour = author.colour + else: + colour = discord.Colour.red() + + description = "Sent by {} {}".format(author, source) + + e = discord.Embed(colour=colour, description=message) + if author.avatar_url: + e.set_author(name=description, icon_url=author.avatar_url) + else: + e.set_author(name=description) + e.set_footer(text=footer) + + try: + await self.bot.send_message(owner, embed=e) + except discord.InvalidArgument: + await self.bot.say("I cannot send your message, I'm unable to find" + " my owner... 
*sigh*") + except discord.HTTPException: + await self.bot.say("Your message is too long.") + except: + await self.bot.say("I'm unable to deliver your message. Sorry.") + else: + await self.bot.say("Your message has been sent.") + + @commands.command() + async def info(self): + """Shows info about Red""" + author_repo = "https://github.com/Twentysix26" + red_repo = author_repo + "/Red-DiscordBot" + server_url = "https://discord.gg/red" + dpy_repo = "https://github.com/Rapptz/discord.py" + python_url = "https://www.python.org/" + since = datetime.datetime(2016, 1, 2, 0, 0) + days_since = (datetime.datetime.utcnow() - since).days + dpy_version = "[{}]({})".format(discord.__version__, dpy_repo) + py_version = "[{}.{}.{}]({})".format(*os.sys.version_info[:3], + python_url) + + owner_set = self.bot.settings.owner is not None + owner = self.bot.settings.owner if owner_set else None + if owner: + owner = discord.utils.get(self.bot.get_all_members(), id=owner) + if not owner: + try: + owner = await self.bot.get_user_info(self.bot.settings.owner) + except: + owner = None + if not owner: + owner = "Unknown" + + about = ( + "This is an instance of [Red, an open source Discord bot]({}) " + "created by [Twentysix]({}) and improved by many.\n\n" + "Red is backed by a passionate community who contributes and " + "creates content for everyone to enjoy. [Join us today]({}) " + "and help us improve!\n\n" + "".format(red_repo, author_repo, server_url)) + + embed = discord.Embed(colour=discord.Colour.red()) + embed.add_field(name="Instance owned by", value=str(owner)) + embed.add_field(name="Python", value=py_version) + embed.add_field(name="discord.py", value=dpy_version) + embed.add_field(name="About Red", value=about, inline=False) + embed.set_footer(text="Bringing joy since 02 Jan 2016 (over " + "{} days ago!)".format(days_since)) + + try: + await self.bot.say(embed=embed) + except discord.HTTPException: + await self.bot.say("I need the `Embed links` permission " + "to send this") + + @commands.command() + async def uptime(self): + """Shows Red's uptime""" + since = self.bot.uptime.strftime("%Y-%m-%d %H:%M:%S") + passed = self.get_bot_uptime() + await self.bot.say("Been up for: **{}** (since {} UTC)" + "".format(passed, since)) + + @commands.command() + async def version(self): + """Shows Red's current version""" + response = self.bot.loop.run_in_executor(None, self._get_version) + result = await asyncio.wait_for(response, timeout=10) + try: + await self.bot.say(embed=result) + except discord.HTTPException: + await self.bot.say("I need the `Embed links` permission " + "to send this") + + @commands.command(pass_context=True) + @checks.is_owner() + async def traceback(self, ctx, public: bool=False): + """Sends to the owner the last command exception that has occurred + + If public (yes is specified), it will be sent to the chat instead""" + if not public: + destination = ctx.message.author + else: + destination = ctx.message.channel + + if self.bot._last_exception: + for page in pagify(self.bot._last_exception): + await self.bot.send_message(destination, box(page, lang="py")) + else: + await self.bot.say("No exception has occurred yet.") + + def _populate_list(self, _list): + """Used for both whitelist / blacklist + + Returns a paginated list""" + users = [] + total = len(_list) + + for user_id in _list: + user = discord.utils.get(self.bot.get_all_members(), id=user_id) + if user: + users.append(str(user)) + + if users: + not_found = total - len(users) + users = ", ".join(users) + if not_found: + users += 
"\n\n ... and {} users I could not find".format(not_found) + return list(pagify(users, delims=[" ", "\n"])) + + return [] + + def _load_cog(self, cogname): + if not self._does_cogfile_exist(cogname): + raise CogNotFoundError(cogname) + try: + mod_obj = importlib.import_module(cogname) + importlib.reload(mod_obj) + self.bot.load_extension(mod_obj.__name__) + except SyntaxError as e: + raise CogLoadError(*e.args) + except: + raise + + def _unload_cog(self, cogname, reloading=False): + if not reloading and cogname == "cogs.owner": + raise OwnerUnloadWithoutReloadError( + "Can't unload the owner plugin :P") + try: + self.bot.unload_extension(cogname) + except: + raise CogUnloadError + + def _list_cogs(self): + cogs = [os.path.basename(f) for f in glob.glob("cogs/*.py")] + return ["cogs." + os.path.splitext(f)[0] for f in cogs] + + def _does_cogfile_exist(self, module): + if "cogs." not in module: + module = "cogs." + module + if module not in self._list_cogs(): + return False + return True + + def _wait_for_answer(self, author): + print(author.name + " requested to be set as owner. If this is you, " + "type 'yes'. Otherwise press enter.") + print() + print("*DO NOT* set anyone else as owner. This has security " + "repercussions.") + + choice = "None" + while choice.lower() != "yes" and choice == "None": + choice = input("> ") + + if choice == "yes": + self.bot.settings.owner = author.id + self.bot.settings.save_settings() + print(author.name + " has been set as owner.") + self.setowner_lock = False + self.owner.hidden = True + else: + print("The set owner request has been ignored.") + self.setowner_lock = False + + def _get_version(self): + if not os.path.isdir(".git"): + msg = "This instance of Red hasn't been installed with git." + e = discord.Embed(title=msg, + colour=discord.Colour.red()) + return e + + commands = " && ".join(( + r'git config --get remote.origin.url', # Remote URL + r'git rev-list --count HEAD', # Number of commits + r'git rev-parse --abbrev-ref HEAD', # Branch name + r'git show -s -n 3 HEAD --format="%cr|%s|%H"' # Last 3 commits + )) + result = os.popen(commands).read() + url, ncommits, branch, commits = result.split("\n", 3) + if url.endswith(".git"): + url = url[:-4] + if url.startswith("git@"): + domain, _, resource = url[4:].partition(':') + url = 'https://{}/{}'.format(domain, resource) + repo_name = url.split("/")[-1] + + embed = discord.Embed(title="Updates of " + repo_name, + description="Last three updates", + colour=discord.Colour.red(), + url="{}/tree/{}".format(url, branch)) + + for line in commits.split('\n'): + if not line: + continue + when, commit, chash = line.split("|") + commit_url = url + "/commit/" + chash + content = "[{}]({}) - {} ".format(chash[:6], commit_url, commit) + embed.add_field(name=when, value=content, inline=False) + + embed.set_footer(text="Total commits: " + ncommits) + + return embed + + def get_bot_uptime(self, *, brief=False): + # Courtesy of Danny + now = datetime.datetime.utcnow() + delta = now - self.bot.uptime + hours, remainder = divmod(int(delta.total_seconds()), 3600) + minutes, seconds = divmod(remainder, 60) + days, hours = divmod(hours, 24) + + if not brief: + if days: + fmt = '{d} days, {h} hours, {m} minutes, and {s} seconds' + else: + fmt = '{h} hours, {m} minutes, and {s} seconds' + else: + fmt = '{h}h {m}m {s}s' + if days: + fmt = '{d}d ' + fmt + + return fmt.format(d=days, h=hours, m=minutes, s=seconds) + + def save_global_ignores(self): + dataIO.save_json("data/red/global_ignores.json", self.global_ignores) + + def 
save_disabled_commands(self): + dataIO.save_json("data/red/disabled_commands.json", self.disabled_commands) + + +def _import_old_data(data): + """Migration from mod.py""" + try: + data["blacklist"] = dataIO.load_json("data/mod/blacklist.json") + except FileNotFoundError: + pass + + try: + data["whitelist"] = dataIO.load_json("data/mod/whitelist.json") + except FileNotFoundError: + pass + + return data + + +def check_files(): + if not os.path.isfile("data/red/disabled_commands.json"): + print("Creating empty disabled_commands.json...") + dataIO.save_json("data/red/disabled_commands.json", []) + + if not os.path.isfile("data/red/global_ignores.json"): + print("Creating empty global_ignores.json...") + data = {"blacklist": [], "whitelist": []} + try: + data = _import_old_data(data) + except Exception as e: + log.error("Failed to migrate blacklist / whitelist data from " + "mod.py: {}".format(e)) + + dataIO.save_json("data/red/global_ignores.json", data) + + +def setup(bot): + check_files() + n = Owner(bot) + bot.add_cog(n) diff --git a/RBXLegacyDiscordBot/cogs/streams.py b/RBXLegacyDiscordBot/cogs/streams.py new file mode 100644 index 0000000..39f95bc --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/streams.py @@ -0,0 +1,689 @@ +from discord.ext import commands +from .utils.dataIO import dataIO +from .utils.chat_formatting import escape_mass_mentions +from .utils import checks +from collections import defaultdict +from string import ascii_letters +from random import choice +import discord +import os +import re +import aiohttp +import asyncio +import logging +import json + + +class StreamsError(Exception): + pass + + +class StreamNotFound(StreamsError): + pass + + +class APIError(StreamsError): + pass + + +class InvalidCredentials(StreamsError): + pass + + +class OfflineStream(StreamsError): + pass + + +class Streams: + """Streams + + Alerts for a variety of streaming services""" + + def __init__(self, bot): + self.bot = bot + self.twitch_streams = dataIO.load_json("data/streams/twitch.json") + self.hitbox_streams = dataIO.load_json("data/streams/hitbox.json") + self.mixer_streams = dataIO.load_json("data/streams/beam.json") + self.picarto_streams = dataIO.load_json("data/streams/picarto.json") + settings = dataIO.load_json("data/streams/settings.json") + self.settings = defaultdict(dict, settings) + self.messages_cache = defaultdict(list) + + @commands.command() + async def hitbox(self, stream: str): + """Checks if hitbox stream is online""" + stream = escape_mass_mentions(stream) + regex = r'^(https?\:\/\/)?(www\.)?(hitbox\.tv\/)' + stream = re.sub(regex, '', stream) + try: + embed = await self.hitbox_online(stream) + except OfflineStream: + await self.bot.say(stream + " is offline.") + except StreamNotFound: + await self.bot.say("That stream doesn't exist.") + except APIError: + await self.bot.say("Error contacting the API.") + else: + await self.bot.say(embed=embed) + + @commands.command(pass_context=True) + async def twitch(self, ctx, stream: str): + """Checks if twitch stream is online""" + stream = escape_mass_mentions(stream) + regex = r'^(https?\:\/\/)?(www\.)?(twitch\.tv\/)' + stream = re.sub(regex, '', stream) + try: + data = await self.fetch_twitch_ids(stream, raise_if_none=True) + embed = await self.twitch_online(data[0]["_id"]) + except OfflineStream: + await self.bot.say(stream + " is offline.") + except StreamNotFound: + await self.bot.say("That stream doesn't exist.") + except APIError: + await self.bot.say("Error contacting the API.") + except InvalidCredentials: + await 
self.bot.say("Owner: Client-ID is invalid or not set. " + "See `{}streamset twitchtoken`" + "".format(ctx.prefix)) + else: + await self.bot.say(embed=embed) + + @commands.command() + async def mixer(self, stream: str): + """Checks if mixer stream is online""" + stream = escape_mass_mentions(stream) + regex = r'^(https?\:\/\/)?(www\.)?(mixer\.com\/)' + stream = re.sub(regex, '', stream) + try: + embed = await self.mixer_online(stream) + except OfflineStream: + await self.bot.say(stream + " is offline.") + except StreamNotFound: + await self.bot.say("That stream doesn't exist.") + except APIError: + await self.bot.say("Error contacting the API.") + else: + await self.bot.say(embed=embed) + + @commands.command() + async def picarto(self, stream: str): + """Checks if picarto stream is online""" + stream = escape_mass_mentions(stream) + regex = r'^(https?\:\/\/)?(www\.)?(picarto\.tv\/)' + stream = re.sub(regex, '', stream) + try: + embed = await self.picarto_online(stream) + except OfflineStream: + await self.bot.say(stream + " is offline.") + except StreamNotFound: + await self.bot.say("That stream doesn't exist.") + except APIError: + await self.bot.say("Error contacting the API.") + else: + await self.bot.say(embed=embed) + + @commands.group(pass_context=True, no_pm=True) + @checks.mod_or_permissions(manage_server=True) + async def streamalert(self, ctx): + """Adds/removes stream alerts from the current channel""" + if ctx.invoked_subcommand is None: + await self.bot.send_cmd_help(ctx) + + @streamalert.command(name="twitch", pass_context=True) + async def twitch_alert(self, ctx, stream: str): + """Adds/removes twitch alerts from the current channel""" + stream = escape_mass_mentions(stream) + regex = r'^(https?\:\/\/)?(www\.)?(twitch\.tv\/)' + stream = re.sub(regex, '', stream) + channel = ctx.message.channel + try: + data = await self.fetch_twitch_ids(stream, raise_if_none=True) + except StreamNotFound: + await self.bot.say("That stream doesn't exist.") + return + except APIError: + await self.bot.say("Error contacting the API.") + return + except InvalidCredentials: + await self.bot.say("Owner: Client-ID is invalid or not set. " + "See `{}streamset twitchtoken`" + "".format(ctx.prefix)) + return + + enabled = self.enable_or_disable_if_active(self.twitch_streams, + stream, + channel, + _id=data[0]["_id"]) + + if enabled: + await self.bot.say("Alert activated. I will notify this channel " + "when {} is live.".format(stream)) + else: + await self.bot.say("Alert has been removed from this channel.") + + dataIO.save_json("data/streams/twitch.json", self.twitch_streams) + + @streamalert.command(name="hitbox", pass_context=True) + async def hitbox_alert(self, ctx, stream: str): + """Adds/removes hitbox alerts from the current channel""" + stream = escape_mass_mentions(stream) + regex = r'^(https?\:\/\/)?(www\.)?(hitbox\.tv\/)' + stream = re.sub(regex, '', stream) + channel = ctx.message.channel + try: + await self.hitbox_online(stream) + except StreamNotFound: + await self.bot.say("That stream doesn't exist.") + return + except APIError: + await self.bot.say("Error contacting the API.") + return + except OfflineStream: + pass + + enabled = self.enable_or_disable_if_active(self.hitbox_streams, + stream, + channel) + + if enabled: + await self.bot.say("Alert activated. 
I will notify this channel " + "when {} is live.".format(stream)) + else: + await self.bot.say("Alert has been removed from this channel.") + + dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams) + + @streamalert.command(name="mixer", pass_context=True) + async def mixer_alert(self, ctx, stream: str): + """Adds/removes mixer alerts from the current channel""" + stream = escape_mass_mentions(stream) + regex = r'^(https?\:\/\/)?(www\.)?(mixer\.com\/)' + stream = re.sub(regex, '', stream) + channel = ctx.message.channel + try: + await self.mixer_online(stream) + except StreamNotFound: + await self.bot.say("That stream doesn't exist.") + return + except APIError: + await self.bot.say("Error contacting the API.") + return + except OfflineStream: + pass + + enabled = self.enable_or_disable_if_active(self.mixer_streams, + stream, + channel) + + if enabled: + await self.bot.say("Alert activated. I will notify this channel " + "when {} is live.".format(stream)) + else: + await self.bot.say("Alert has been removed from this channel.") + + dataIO.save_json("data/streams/beam.json", self.mixer_streams) + + @streamalert.command(name="picarto", pass_context=True) + async def picarto_alert(self, ctx, stream: str): + """Adds/removes picarto alerts from the current channel""" + stream = escape_mass_mentions(stream) + regex = r'^(https?\:\/\/)?(www\.)?(picarto\.tv\/)' + stream = re.sub(regex, '', stream) + channel = ctx.message.channel + try: + await self.picarto_online(stream) + except StreamNotFound: + await self.bot.say("That stream doesn't exist.") + return + except APIError: + await self.bot.say("Error contacting the API.") + return + except OfflineStream: + pass + + enabled = self.enable_or_disable_if_active(self.picarto_streams, + stream, + channel) + + if enabled: + await self.bot.say("Alert activated. 
I will notify this channel " + "when {} is live.".format(stream)) + else: + await self.bot.say("Alert has been removed from this channel.") + + dataIO.save_json("data/streams/picarto.json", self.picarto_streams) + + @streamalert.command(name="stop", pass_context=True) + async def stop_alert(self, ctx): + """Stops all streams alerts in the current channel""" + channel = ctx.message.channel + + streams = ( + self.hitbox_streams, + self.twitch_streams, + self.mixer_streams, + self.picarto_streams + ) + + for stream_type in streams: + to_delete = [] + + for s in stream_type: + if channel.id in s["CHANNELS"]: + s["CHANNELS"].remove(channel.id) + if not s["CHANNELS"]: + to_delete.append(s) + + for s in to_delete: + stream_type.remove(s) + + dataIO.save_json("data/streams/twitch.json", self.twitch_streams) + dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams) + dataIO.save_json("data/streams/beam.json", self.mixer_streams) + dataIO.save_json("data/streams/picarto.json", self.picarto_streams) + + await self.bot.say("There will be no more stream alerts in this " + "channel.") + + @commands.group(pass_context=True) + async def streamset(self, ctx): + """Stream settings""" + if ctx.invoked_subcommand is None: + await self.bot.send_cmd_help(ctx) + + @streamset.command() + @checks.is_owner() + async def twitchtoken(self, token : str): + """Sets the Client-ID for Twitch + + https://blog.twitch.tv/client-id-required-for-kraken-api-calls-afbb8e95f843""" + self.settings["TWITCH_TOKEN"] = token + dataIO.save_json("data/streams/settings.json", self.settings) + await self.bot.say('Twitch Client-ID set.') + + @streamset.command(pass_context=True, no_pm=True) + @checks.admin() + async def mention(self, ctx, *, mention_type : str): + """Sets mentions for stream alerts + + Types: everyone, here, none""" + server = ctx.message.server + mention_type = mention_type.lower() + + if mention_type in ("everyone", "here"): + self.settings[server.id]["MENTION"] = "@" + mention_type + await self.bot.say("When a stream is online @\u200b{} will be " + "mentioned.".format(mention_type)) + elif mention_type == "none": + self.settings[server.id]["MENTION"] = "" + await self.bot.say("Mentions disabled.") + else: + await self.bot.send_cmd_help(ctx) + + dataIO.save_json("data/streams/settings.json", self.settings) + + @streamset.command(pass_context=True, no_pm=True) + @checks.admin() + async def autodelete(self, ctx): + """Toggles automatic notification deletion for streams that go offline""" + server = ctx.message.server + settings = self.settings[server.id] + current = settings.get("AUTODELETE", True) + settings["AUTODELETE"] = not current + if settings["AUTODELETE"]: + await self.bot.say("Notifications will be automatically deleted " + "once the stream goes offline.") + else: + await self.bot.say("Notifications won't be deleted anymore.") + + dataIO.save_json("data/streams/settings.json", self.settings) + + async def hitbox_online(self, stream): + url = "https://api.hitbox.tv/media/live/" + stream + + async with aiohttp.get(url) as r: + data = await r.json(encoding='utf-8') + + if "livestream" not in data: + raise StreamNotFound() + elif data["livestream"][0]["media_is_live"] == "0": + raise OfflineStream() + elif data["livestream"][0]["media_is_live"] == "1": + return self.hitbox_embed(data) + + raise APIError() + + async def twitch_online(self, stream): + session = aiohttp.ClientSession() + url = "https://api.twitch.tv/kraken/streams/" + stream + header = { + 'Client-ID': self.settings.get("TWITCH_TOKEN", ""), + 
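+ # Both headers target the Twitch "Kraken" v5 API used here: Client-ID
+ # identifies the application (set via [p]streamset twitchtoken) and the
+ # Accept header below pins API version 5, where streams are addressed by
+ # the numeric _id resolved in fetch_twitch_ids() rather than by login name.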
'Accept': 'application/vnd.twitchtv.v5+json' + } + + async with session.get(url, headers=header) as r: + data = await r.json(encoding='utf-8') + await session.close() + if r.status == 200: + if data["stream"] is None: + raise OfflineStream() + return self.twitch_embed(data) + elif r.status == 400: + raise InvalidCredentials() + elif r.status == 404: + raise StreamNotFound() + else: + raise APIError() + + async def mixer_online(self, stream): + url = "https://mixer.com/api/v1/channels/" + stream + + async with aiohttp.get(url) as r: + data = await r.json(encoding='utf-8') + if r.status == 200: + if data["online"] is True: + return self.mixer_embed(data) + else: + raise OfflineStream() + elif r.status == 404: + raise StreamNotFound() + else: + raise APIError() + + async def picarto_online(self, stream): + url = "https://api.picarto.tv/v1/channel/name/" + stream + + async with aiohttp.get(url) as r: + data = await r.text(encoding='utf-8') + if r.status == 200: + data = json.loads(data) + if data["online"] is True: + return self.picarto_embed(data) + else: + raise OfflineStream() + elif r.status == 404: + raise StreamNotFound() + else: + raise APIError() + + async def fetch_twitch_ids(self, *streams, raise_if_none=False): + def chunks(l): + for i in range(0, len(l), 100): + yield l[i:i + 100] + + base_url = "https://api.twitch.tv/kraken/users?login=" + header = { + 'Client-ID': self.settings.get("TWITCH_TOKEN", ""), + 'Accept': 'application/vnd.twitchtv.v5+json' + } + results = [] + + for streams_list in chunks(streams): + session = aiohttp.ClientSession() + url = base_url + ",".join(streams_list) + async with session.get(url, headers=header) as r: + data = await r.json() + if r.status == 200: + results.extend(data["users"]) + elif r.status == 400: + raise InvalidCredentials() + else: + raise APIError() + await session.close() + + if not results and raise_if_none: + raise StreamNotFound() + + return results + + def twitch_embed(self, data): + channel = data["stream"]["channel"] + url = channel["url"] + logo = channel["logo"] + if logo is None: + logo = "https://static-cdn.jtvnw.net/jtv_user_pictures/xarth/404_user_70x70.png" + status = channel["status"] + if not status: + status = "Untitled broadcast" + embed = discord.Embed(title=status, url=url) + embed.set_author(name=channel["display_name"]) + embed.add_field(name="Followers", value=channel["followers"]) + embed.add_field(name="Total views", value=channel["views"]) + embed.set_thumbnail(url=logo) + if data["stream"]["preview"]["medium"]: + embed.set_image(url=data["stream"]["preview"]["medium"] + self.rnd_attr()) + if channel["game"]: + embed.set_footer(text="Playing: " + channel["game"]) + embed.color = 0x6441A4 + return embed + + def hitbox_embed(self, data): + base_url = "https://edge.sf.hitbox.tv" + livestream = data["livestream"][0] + channel = livestream["channel"] + url = channel["channel_link"] + embed = discord.Embed(title=livestream["media_status"], url=url) + embed.set_author(name=livestream["media_name"]) + embed.add_field(name="Followers", value=channel["followers"]) + #embed.add_field(name="Views", value=channel["views"]) + embed.set_thumbnail(url=base_url + channel["user_logo"]) + if livestream["media_thumbnail"]: + embed.set_image(url=base_url + livestream["media_thumbnail"] + self.rnd_attr()) + embed.set_footer(text="Playing: " + livestream["category_name"]) + embed.color = 0x98CB00 + return embed + + def mixer_embed(self, data): + default_avatar = ("https://mixer.com/_latest/assets/images/main/" + "avatars/default.jpg") + 
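+ # Fields consumed from the Mixer channel payload below: "token" (channel URL
+ # slug), "name" (stream title), "user" (username / avatarUrl, falling back to
+ # default_avatar), "numFollowers", "viewersTotal", "thumbnail" and "type"
+ # (game name). Anything else in the API response is ignored by this embed.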
user = data["user"] + url = "https://mixer.com/" + data["token"] + embed = discord.Embed(title=data["name"], url=url) + embed.set_author(name=user["username"]) + embed.add_field(name="Followers", value=data["numFollowers"]) + embed.add_field(name="Total views", value=data["viewersTotal"]) + if user["avatarUrl"]: + embed.set_thumbnail(url=user["avatarUrl"]) + else: + embed.set_thumbnail(url=default_avatar) + if data["thumbnail"]: + embed.set_image(url=data["thumbnail"]["url"] + self.rnd_attr()) + embed.color = 0x4C90F3 + if data["type"] is not None: + embed.set_footer(text="Playing: " + data["type"]["name"]) + return embed + + def picarto_embed(self, data): + avatar = ("https://picarto.tv/user_data/usrimg/{}/dsdefault.jpg{}" + "".format(data["name"].lower(), self.rnd_attr())) + url = "https://picarto.tv/" + data["name"] + thumbnail = ("https://thumb.picarto.tv/thumbnail/{}.jpg" + "".format(data["name"])) + embed = discord.Embed(title=data["title"], url=url) + embed.set_author(name=data["name"]) + embed.set_image(url=thumbnail + self.rnd_attr()) + embed.add_field(name="Followers", value=data["followers"]) + embed.add_field(name="Total views", value=data["viewers_total"]) + embed.set_thumbnail(url=avatar) + embed.color = 0x132332 + data["tags"] = ", ".join(data["tags"]) + + if not data["tags"]: + data["tags"] = "None" + + if data["adult"]: + data["adult"] = "NSFW | " + else: + data["adult"] = "" + + embed.color = 0x4C90F3 + embed.set_footer(text="{adult}Category: {category} | Tags: {tags}" + "".format(**data)) + return embed + + def enable_or_disable_if_active(self, streams, stream, channel, _id=None): + """Returns True if enabled or False if disabled""" + for i, s in enumerate(streams): + if s["NAME"] != stream: + continue + + if channel.id in s["CHANNELS"]: + streams[i]["CHANNELS"].remove(channel.id) + if not s["CHANNELS"]: + streams.remove(s) + return False + else: + streams[i]["CHANNELS"].append(channel.id) + return True + + data = {"CHANNELS": [channel.id], + "NAME": stream, + "ALREADY_ONLINE": False} + + if _id: + data["ID"] = _id + + streams.append(data) + + return True + + async def stream_checker(self): + CHECK_DELAY = 60 + + try: + await self._migration_twitch_v5() + except InvalidCredentials: + print("Error during convertion of twitch usernames to IDs: " + "invalid token") + except Exception as e: + print("Error during convertion of twitch usernames to IDs: " + "{}".format(e)) + + while self == self.bot.get_cog("Streams"): + save = False + + streams = ((self.twitch_streams, self.twitch_online), + (self.hitbox_streams, self.hitbox_online), + (self.mixer_streams, self.mixer_online), + (self.picarto_streams, self.picarto_online)) + + for streams_list, parser in streams: + if parser == self.twitch_online: + _type = "ID" + else: + _type = "NAME" + for stream in streams_list: + if _type not in stream: + continue + key = (parser, stream[_type]) + try: + embed = await parser(stream[_type]) + except OfflineStream: + if stream["ALREADY_ONLINE"]: + stream["ALREADY_ONLINE"] = False + save = True + await self.delete_old_notifications(key) + except: # We don't want our task to die + continue + else: + if stream["ALREADY_ONLINE"]: + continue + save = True + stream["ALREADY_ONLINE"] = True + messages_sent = [] + for channel_id in stream["CHANNELS"]: + channel = self.bot.get_channel(channel_id) + if channel is None: + continue + mention = self.settings.get(channel.server.id, {}).get("MENTION", "") + can_speak = channel.permissions_for(channel.server.me).send_messages + message = mention + " {} is 
live!".format(stream["NAME"]) + if channel and can_speak: + m = await self.bot.send_message(channel, message, embed=embed) + messages_sent.append(m) + self.messages_cache[key] = messages_sent + + await asyncio.sleep(0.5) + + if save: + dataIO.save_json("data/streams/twitch.json", self.twitch_streams) + dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams) + dataIO.save_json("data/streams/beam.json", self.mixer_streams) + dataIO.save_json("data/streams/picarto.json", self.picarto_streams) + + await asyncio.sleep(CHECK_DELAY) + + async def delete_old_notifications(self, key): + for message in self.messages_cache[key]: + server = message.server + settings = self.settings.get(server.id, {}) + is_enabled = settings.get("AUTODELETE", True) + try: + if is_enabled: + await self.bot.delete_message(message) + except: + pass + + del self.messages_cache[key] + + def rnd_attr(self): + """Avoids Discord's caching""" + return "?rnd=" + "".join([choice(ascii_letters) for i in range(6)]) + + async def _migration_twitch_v5(self): + # Migration of old twitch streams to API v5 + to_convert = [] + for stream in self.twitch_streams: + if "ID" not in stream: + to_convert.append(stream["NAME"]) + + if not to_convert: + return + + results = await self.fetch_twitch_ids(*to_convert) + + for stream in self.twitch_streams: + for result in results: + if stream["NAME"].lower() == result["name"].lower(): + stream["ID"] = result["_id"] + + # We might as well delete the invalid / renamed ones + self.twitch_streams = [s for s in self.twitch_streams if "ID" in s] + + dataIO.save_json("data/streams/twitch.json", self.twitch_streams) + + +def check_folders(): + if not os.path.exists("data/streams"): + print("Creating data/streams folder...") + os.makedirs("data/streams") + + +def check_files(): + stream_files = ( + "twitch.json", + "hitbox.json", + "beam.json", + "picarto.json" + ) + + for filename in stream_files: + if not dataIO.is_valid_json("data/streams/" + filename): + print("Creating empty {}...".format(filename)) + dataIO.save_json("data/streams/" + filename, []) + + f = "data/streams/settings.json" + if not dataIO.is_valid_json(f): + print("Creating empty settings.json...") + dataIO.save_json(f, {}) + + +def setup(bot): + logger = logging.getLogger('aiohttp.client') + logger.setLevel(50) # Stops warning spam + check_folders() + check_files() + n = Streams(bot) + loop = asyncio.get_event_loop() + loop.create_task(n.stream_checker()) + bot.add_cog(n) diff --git a/RBXLegacyDiscordBot/cogs/trivia.py b/RBXLegacyDiscordBot/cogs/trivia.py new file mode 100644 index 0000000..8c5c619 --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/trivia.py @@ -0,0 +1,332 @@ +from discord.ext import commands +from random import choice +from .utils.dataIO import dataIO +from .utils import checks +from .utils.chat_formatting import box +from collections import Counter, defaultdict, namedtuple +import discord +import time +import os +import asyncio +import chardet + +DEFAULTS = {"MAX_SCORE" : 10, + "TIMEOUT" : 120, + "DELAY" : 15, + "BOT_PLAYS" : False, + "REVEAL_ANSWER": True} + +TriviaLine = namedtuple("TriviaLine", "question answers") + + +class Trivia: + """General commands.""" + def __init__(self, bot): + self.bot = bot + self.trivia_sessions = [] + self.file_path = "data/trivia/settings.json" + settings = dataIO.load_json(self.file_path) + self.settings = defaultdict(lambda: DEFAULTS.copy(), settings) + + @commands.group(pass_context=True, no_pm=True) + @checks.mod_or_permissions(administrator=True) + async def triviaset(self, 
ctx):
+        """Change trivia settings"""
+        server = ctx.message.server
+        if ctx.invoked_subcommand is None:
+            settings = self.settings[server.id]
+            msg = box("Red gains points: {BOT_PLAYS}\n"
+                      "Seconds to answer: {DELAY}\n"
+                      "Points to win: {MAX_SCORE}\n"
+                      "Reveal answer on timeout: {REVEAL_ANSWER}\n"
+                      "".format(**settings))
+            msg += "\nSee {}help triviaset to edit the settings".format(ctx.prefix)
+            await self.bot.say(msg)
+
+    @triviaset.command(pass_context=True)
+    async def maxscore(self, ctx, score: int):
+        """Points required to win"""
+        server = ctx.message.server
+        if score > 0:
+            self.settings[server.id]["MAX_SCORE"] = score
+            self.save_settings()
+            await self.bot.say("Points required to win set to {}".format(score))
+        else:
+            await self.bot.say("Score must be greater than 0.")
+
+    @triviaset.command(pass_context=True)
+    async def timelimit(self, ctx, seconds: int):
+        """Maximum seconds to answer"""
+        server = ctx.message.server
+        if seconds > 4:
+            self.settings[server.id]["DELAY"] = seconds
+            self.save_settings()
+            await self.bot.say("Maximum seconds to answer set to {}".format(seconds))
+        else:
+            await self.bot.say("Seconds must be at least 5.")
+
+    @triviaset.command(pass_context=True)
+    async def botplays(self, ctx):
+        """Red gains points"""
+        server = ctx.message.server
+        if self.settings[server.id]["BOT_PLAYS"]:
+            self.settings[server.id]["BOT_PLAYS"] = False
+            await self.bot.say("Alright, I won't embarrass you at trivia anymore.")
+        else:
+            self.settings[server.id]["BOT_PLAYS"] = True
+            await self.bot.say("I'll gain a point every time you don't answer in time.")
+        self.save_settings()
+
+    @triviaset.command(pass_context=True)
+    async def revealanswer(self, ctx):
+        """Reveals the answer to the question on timeout"""
+        server = ctx.message.server
+        if self.settings[server.id]["REVEAL_ANSWER"]:
+            self.settings[server.id]["REVEAL_ANSWER"] = False
+            await self.bot.say("I won't reveal the answer to the questions anymore.")
+        else:
+            self.settings[server.id]["REVEAL_ANSWER"] = True
+            await self.bot.say("I'll reveal the answer if no one knows it.")
+        self.save_settings()
+
+    @commands.group(pass_context=True, invoke_without_command=True, no_pm=True)
+    async def trivia(self, ctx, list_name: str):
+        """Start a trivia session with the specified list"""
+        message = ctx.message
+        server = message.server
+        session = self.get_trivia_by_channel(message.channel)
+        if not session:
+            try:
+                trivia_list = self.parse_trivia_list(list_name)
+            except FileNotFoundError:
+                await self.bot.say("That trivia list doesn't exist.")
+            except Exception as e:
+                print(e)
+                await self.bot.say("Error loading the trivia list.")
+            else:
+                settings = self.settings[server.id]
+                t = TriviaSession(self.bot, trivia_list, message, settings)
+                self.trivia_sessions.append(t)
+                await t.new_question()
+        else:
+            await self.bot.say("A trivia session is already ongoing in this channel.")
+
+    @trivia.group(name="stop", pass_context=True, no_pm=True)
+    async def trivia_stop(self, ctx):
+        """Stops an ongoing trivia session"""
+        author = ctx.message.author
+        server = author.server
+        admin_role = self.bot.settings.get_server_admin(server)
+        mod_role = self.bot.settings.get_server_mod(server)
+        is_admin = discord.utils.get(author.roles, name=admin_role)
+        is_mod = discord.utils.get(author.roles, name=mod_role)
+        is_owner = author.id == self.bot.settings.owner
+        is_server_owner = author == server.owner
+        is_authorized = is_admin or is_mod or is_owner or is_server_owner
+
+        session = self.get_trivia_by_channel(ctx.message.channel)
+        if 
session: + if author == session.starter or is_authorized: + await session.end_game() + await self.bot.say("Trivia stopped.") + else: + await self.bot.say("You are not allowed to do that.") + else: + await self.bot.say("There's no trivia session ongoing in this channel.") + + @trivia.group(name="list") + async def trivia_list(self): + """Shows available trivia lists""" + lists = os.listdir("data/trivia/") + lists = [l for l in lists if l.endswith(".txt") and " " not in l] + lists = [l.replace(".txt", "") for l in lists] + + if lists: + msg = "+ Available trivia lists\n\n" + ", ".join(sorted(lists)) + msg = box(msg, lang="diff") + if len(lists) < 100: + await self.bot.say(msg) + else: + await self.bot.whisper(msg) + else: + await self.bot.say("There are no trivia lists available.") + + def parse_trivia_list(self, filename): + path = "data/trivia/{}.txt".format(filename) + parsed_list = [] + + with open(path, "rb") as f: + try: + encoding = chardet.detect(f.read())["encoding"] + except: + encoding = "ISO-8859-1" + + with open(path, "r", encoding=encoding) as f: + trivia_list = f.readlines() + + for line in trivia_list: + if "`" not in line: + continue + line = line.replace("\n", "") + line = line.split("`") + question = line[0] + answers = [] + for l in line[1:]: + answers.append(l.strip()) + if len(line) >= 2 and question and answers: + line = TriviaLine(question=question, answers=answers) + parsed_list.append(line) + + if not parsed_list: + raise ValueError("Empty trivia list") + + return parsed_list + + def get_trivia_by_channel(self, channel): + for t in self.trivia_sessions: + if t.channel == channel: + return t + return None + + async def on_message(self, message): + if message.author != self.bot.user: + session = self.get_trivia_by_channel(message.channel) + if session: + await session.check_answer(message) + + async def on_trivia_end(self, instance): + if instance in self.trivia_sessions: + self.trivia_sessions.remove(instance) + + def save_settings(self): + dataIO.save_json(self.file_path, self.settings) + + +class TriviaSession(): + def __init__(self, bot, trivia_list, message, settings): + self.bot = bot + self.reveal_messages = ("I know this one! {}!", + "Easy: {}.", + "Oh really? 
It's {} of course.") + self.fail_messages = ("To the next one I guess...", + "Moving on...", + "I'm sure you'll know the answer of the next one.", + "\N{PENSIVE FACE} Next one.") + self.current_line = None # {"QUESTION" : "String", "ANSWERS" : []} + self.question_list = trivia_list + self.channel = message.channel + self.starter = message.author + self.scores = Counter() + self.status = "new question" + self.timer = None + self.timeout = time.perf_counter() + self.count = 0 + self.settings = settings + + async def stop_trivia(self): + self.status = "stop" + self.bot.dispatch("trivia_end", self) + + async def end_game(self): + self.status = "stop" + if self.scores: + await self.send_table() + self.bot.dispatch("trivia_end", self) + + async def new_question(self): + for score in self.scores.values(): + if score == self.settings["MAX_SCORE"]: + await self.end_game() + return True + if self.question_list == []: + await self.end_game() + return True + self.current_line = choice(self.question_list) + self.question_list.remove(self.current_line) + self.status = "waiting for answer" + self.count += 1 + self.timer = int(time.perf_counter()) + msg = "**Question number {}!**\n\n{}".format(self.count, self.current_line.question) + await self.bot.say(msg) + + while self.status != "correct answer" and abs(self.timer - int(time.perf_counter())) <= self.settings["DELAY"]: + if abs(self.timeout - int(time.perf_counter())) >= self.settings["TIMEOUT"]: + await self.bot.say("Guys...? Well, I guess I'll stop then.") + await self.stop_trivia() + return True + await asyncio.sleep(1) #Waiting for an answer or for the time limit + if self.status == "correct answer": + self.status = "new question" + await asyncio.sleep(3) + if not self.status == "stop": + await self.new_question() + elif self.status == "stop": + return True + else: + if self.settings["REVEAL_ANSWER"]: + msg = choice(self.reveal_messages).format(self.current_line.answers[0]) + else: + msg = choice(self.fail_messages) + if self.settings["BOT_PLAYS"]: + msg += " **+1** for me!" + self.scores[self.bot.user] += 1 + self.current_line = None + await self.bot.say(msg) + await self.bot.type() + await asyncio.sleep(3) + if not self.status == "stop": + await self.new_question() + + async def send_table(self): + t = "+ Results: \n\n" + for user, score in self.scores.most_common(): + t += "+ {}\t{}\n".format(user, score) + await self.bot.say(box(t, lang="diff")) + + async def check_answer(self, message): + if message.author == self.bot.user: + return + elif self.current_line is None: + return + + self.timeout = time.perf_counter() + has_guessed = False + + for answer in self.current_line.answers: + answer = answer.lower() + guess = message.content.lower() + if " " not in answer: # Exact matching, issue #331 + guess = guess.split(" ") + for word in guess: + if word == answer: + has_guessed = True + else: # The answer has spaces, we can't be as strict + if answer in guess: + has_guessed = True + + if has_guessed: + self.current_line = None + self.status = "correct answer" + self.scores[message.author] += 1 + msg = "You got it {}! 
**+1** to you!".format(message.author.name) + await self.bot.send_message(message.channel, msg) + + +def check_folders(): + folders = ("data", "data/trivia/") + for folder in folders: + if not os.path.exists(folder): + print("Creating " + folder + " folder...") + os.makedirs(folder) + + +def check_files(): + if not os.path.isfile("data/trivia/settings.json"): + print("Creating empty settings.json...") + dataIO.save_json("data/trivia/settings.json", {}) + + +def setup(bot): + check_folders() + check_files() + bot.add_cog(Trivia(bot)) diff --git a/RBXLegacyDiscordBot/cogs/utils/__init__.py b/RBXLegacyDiscordBot/cogs/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/RBXLegacyDiscordBot/cogs/utils/chat_formatting.py b/RBXLegacyDiscordBot/cogs/utils/chat_formatting.py new file mode 100644 index 0000000..3f8e3fe --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/utils/chat_formatting.py @@ -0,0 +1,80 @@ +def error(text): + return "\N{NO ENTRY SIGN} {}".format(text) + + +def warning(text): + return "\N{WARNING SIGN} {}".format(text) + + +def info(text): + return "\N{INFORMATION SOURCE} {}".format(text) + + +def question(text): + return "\N{BLACK QUESTION MARK ORNAMENT} {}".format(text) + + +def bold(text): + return "**{}**".format(text) + + +def box(text, lang=""): + ret = "```{}\n{}\n```".format(lang, text) + return ret + + +def inline(text): + return "`{}`".format(text) + + +def italics(text): + return "*{}*".format(text) + + +def pagify(text, delims=["\n"], *, escape=True, shorten_by=8, + page_length=2000): + """DOES NOT RESPECT MARKDOWN BOXES OR INLINE CODE""" + in_text = text + if escape: + num_mentions = text.count("@here") + text.count("@everyone") + shorten_by += num_mentions + page_length -= shorten_by + while len(in_text) > page_length: + closest_delim = max([in_text.rfind(d, 0, page_length) + for d in delims]) + closest_delim = closest_delim if closest_delim != -1 else page_length + if escape: + to_send = escape_mass_mentions(in_text[:closest_delim]) + else: + to_send = in_text[:closest_delim] + yield to_send + in_text = in_text[closest_delim:] + + if escape: + yield escape_mass_mentions(in_text) + else: + yield in_text + + +def strikethrough(text): + return "~~{}~~".format(text) + + +def underline(text): + return "__{}__".format(text) + + +def escape(text, *, mass_mentions=False, formatting=False): + if mass_mentions: + text = text.replace("@everyone", "@\u200beveryone") + text = text.replace("@here", "@\u200bhere") + if formatting: + text = (text.replace("`", "\\`") + .replace("*", "\\*") + .replace("_", "\\_") + .replace("~", "\\~")) + return text + + +def escape_mass_mentions(text): + return escape(text, mass_mentions=True) diff --git a/RBXLegacyDiscordBot/cogs/utils/checks.py b/RBXLegacyDiscordBot/cogs/utils/checks.py new file mode 100644 index 0000000..bdc6104 --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/utils/checks.py @@ -0,0 +1,88 @@ +from discord.ext import commands +import discord.utils +from __main__ import settings + +# +# This is a modified version of checks.py, originally made by Rapptz +# +# https://github.com/Rapptz +# https://github.com/Rapptz/RoboDanny/tree/async +# + +def is_owner_check(ctx): + return ctx.message.author.id == settings.owner + +def is_owner(): + return commands.check(is_owner_check) + +# The permission system of the bot is based on a "just works" basis +# You have permissions and the bot has permissions. 
If you meet the permissions +# required to execute the command (and the bot does as well) then it goes through +# and you can execute the command. +# If these checks fail, then there are two fallbacks. +# A role with the name of Bot Mod and a role with the name of Bot Admin. +# Having these roles provides you access to certain commands without actually having +# the permissions required for them. +# Of course, the owner will always be able to execute commands. + +def check_permissions(ctx, perms): + if is_owner_check(ctx): + return True + elif not perms: + return False + + ch = ctx.message.channel + author = ctx.message.author + resolved = ch.permissions_for(author) + return all(getattr(resolved, name, None) == value for name, value in perms.items()) + +def role_or_permissions(ctx, check, **perms): + if check_permissions(ctx, perms): + return True + + ch = ctx.message.channel + author = ctx.message.author + if ch.is_private: + return False # can't have roles in PMs + + role = discord.utils.find(check, author.roles) + return role is not None + +def mod_or_permissions(**perms): + def predicate(ctx): + server = ctx.message.server + mod_role = settings.get_server_mod(server).lower() + admin_role = settings.get_server_admin(server).lower() + return role_or_permissions(ctx, lambda r: r.name.lower() in (mod_role,admin_role), **perms) + + return commands.check(predicate) + +def admin_or_permissions(**perms): + def predicate(ctx): + server = ctx.message.server + admin_role = settings.get_server_admin(server) + return role_or_permissions(ctx, lambda r: r.name.lower() == admin_role.lower(), **perms) + + return commands.check(predicate) + +def serverowner_or_permissions(**perms): + def predicate(ctx): + if ctx.message.server is None: + return False + server = ctx.message.server + owner = server.owner + + if ctx.message.author.id == owner.id: + return True + + return check_permissions(ctx,perms) + return commands.check(predicate) + +def serverowner(): + return serverowner_or_permissions() + +def admin(): + return admin_or_permissions() + +def mod(): + return mod_or_permissions() diff --git a/RBXLegacyDiscordBot/cogs/utils/dataIO.py b/RBXLegacyDiscordBot/cogs/utils/dataIO.py new file mode 100644 index 0000000..d8bc673 --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/utils/dataIO.py @@ -0,0 +1,79 @@ +import json +import os +import logging +from random import randint + +class InvalidFileIO(Exception): + pass + +class DataIO(): + def __init__(self): + self.logger = logging.getLogger("red") + + def save_json(self, filename, data): + """Atomically saves json file""" + rnd = randint(1000, 9999) + path, ext = os.path.splitext(filename) + tmp_file = "{}-{}.tmp".format(path, rnd) + self._save_json(tmp_file, data) + try: + self._read_json(tmp_file) + except json.decoder.JSONDecodeError: + self.logger.exception("Attempted to write file {} but JSON " + "integrity check on tmp file has failed. " + "The original file is unaltered." 
+ "".format(filename)) + return False + os.replace(tmp_file, filename) + return True + + def load_json(self, filename): + """Loads json file""" + return self._read_json(filename) + + def is_valid_json(self, filename): + """Verifies if json file exists / is readable""" + try: + self._read_json(filename) + return True + except FileNotFoundError: + return False + except json.decoder.JSONDecodeError: + return False + + def _read_json(self, filename): + with open(filename, encoding='utf-8', mode="r") as f: + data = json.load(f) + return data + + def _save_json(self, filename, data): + with open(filename, encoding='utf-8', mode="w") as f: + json.dump(data, f, indent=4,sort_keys=True, + separators=(',',' : ')) + return data + + def _legacy_fileio(self, filename, IO, data=None): + """Old fileIO provided for backwards compatibility""" + if IO == "save" and data != None: + return self.save_json(filename, data) + elif IO == "load" and data == None: + return self.load_json(filename) + elif IO == "check" and data == None: + return self.is_valid_json(filename) + else: + raise InvalidFileIO("FileIO was called with invalid" + " parameters") + +def get_value(filename, key): + with open(filename, encoding='utf-8', mode="r") as f: + data = json.load(f) + return data[key] + +def set_value(filename, key, value): + data = fileIO(filename, "load") + data[key] = value + fileIO(filename, "save", data) + return True + +dataIO = DataIO() +fileIO = dataIO._legacy_fileio # backwards compatibility diff --git a/RBXLegacyDiscordBot/cogs/utils/settings.py b/RBXLegacyDiscordBot/cogs/utils/settings.py new file mode 100644 index 0000000..f8b41b7 --- /dev/null +++ b/RBXLegacyDiscordBot/cogs/utils/settings.py @@ -0,0 +1,291 @@ +from .dataIO import dataIO +from copy import deepcopy +import discord +import os +import argparse + + +default_path = "data/red/settings.json" + + +class Settings: + + def __init__(self, path=default_path, parse_args=True): + self.path = path + self.check_folders() + self.default_settings = { + "TOKEN": None, + "EMAIL": None, + "PASSWORD": None, + "OWNER": None, + "PREFIXES": [], + "default": {"ADMIN_ROLE": "Transistor", + "MOD_ROLE": "Process", + "PREFIXES": []} + } + self._memory_only = False + + if not dataIO.is_valid_json(self.path): + self.bot_settings = deepcopy(self.default_settings) + self.save_settings() + else: + current = dataIO.load_json(self.path) + if current.keys() != self.default_settings.keys(): + for key in self.default_settings.keys(): + if key not in current.keys(): + current[key] = self.default_settings[key] + print("Adding " + str(key) + + " field to red settings.json") + dataIO.save_json(self.path, current) + self.bot_settings = dataIO.load_json(self.path) + + if "default" not in self.bot_settings: + self.update_old_settings_v1() + + if "LOGIN_TYPE" in self.bot_settings: + self.update_old_settings_v2() + if parse_args: + self.parse_cmd_arguments() + + def parse_cmd_arguments(self): + parser = argparse.ArgumentParser(description="Red - Discord Bot") + parser.add_argument("--owner", help="ID of the owner. Only who hosts " + "Red should be owner, this has " + "security implications") + parser.add_argument("--prefix", "-p", action="append", + help="Global prefix. Can be multiple") + parser.add_argument("--admin-role", help="Role seen as admin role by " + "Red") + parser.add_argument("--mod-role", help="Role seen as mod role by Red") + parser.add_argument("--no-prompt", + action="store_true", + help="Disables console inputs. 
Features requiring " + "console interaction could be disabled as a " + "result") + parser.add_argument("--no-cogs", + action="store_true", + help="Starts Red with no cogs loaded, only core") + parser.add_argument("--self-bot", + action='store_true', + help="Specifies if Red should log in as selfbot") + parser.add_argument("--memory-only", + action="store_true", + help="Arguments passed and future edits to the " + "settings will not be saved to disk") + parser.add_argument("--dry-run", + action="store_true", + help="Makes Red quit with code 0 just before the " + "login. This is useful for testing the boot " + "process.") + parser.add_argument("--debug", + action="store_true", + help="Enables debug mode") + + args = parser.parse_args() + + if args.owner: + self.owner = args.owner + if args.prefix: + self.prefixes = sorted(args.prefix, reverse=True) + if args.admin_role: + self.default_admin = args.admin_role + if args.mod_role: + self.default_mod = args.mod_role + + self.no_prompt = args.no_prompt + self.self_bot = args.self_bot + self._memory_only = args.memory_only + self._no_cogs = args.no_cogs + self.debug = args.debug + self._dry_run = args.dry_run + + self.save_settings() + + def check_folders(self): + folders = ("data", os.path.dirname(self.path), "cogs", "cogs/utils") + for folder in folders: + if not os.path.exists(folder): + print("Creating " + folder + " folder...") + os.makedirs(folder) + + def save_settings(self): + if not self._memory_only: + dataIO.save_json(self.path, self.bot_settings) + + def update_old_settings_v1(self): + # This converts the old settings format + mod = self.bot_settings["MOD_ROLE"] + admin = self.bot_settings["ADMIN_ROLE"] + del self.bot_settings["MOD_ROLE"] + del self.bot_settings["ADMIN_ROLE"] + self.bot_settings["default"] = {"MOD_ROLE": mod, + "ADMIN_ROLE": admin, + "PREFIXES": [] + } + self.save_settings() + + def update_old_settings_v2(self): + # The joys of backwards compatibility + settings = self.bot_settings + if settings["EMAIL"] == "EmailHere": + settings["EMAIL"] = None + if settings["PASSWORD"] == "": + settings["PASSWORD"] = None + if settings["LOGIN_TYPE"] == "token": + settings["TOKEN"] = settings["EMAIL"] + settings["EMAIL"] = None + settings["PASSWORD"] = None + else: + settings["TOKEN"] = None + del settings["LOGIN_TYPE"] + self.save_settings() + + @property + def owner(self): + return self.bot_settings["OWNER"] + + @owner.setter + def owner(self, value): + self.bot_settings["OWNER"] = value + + @property + def token(self): + return os.environ.get("RED_TOKEN", self.bot_settings["TOKEN"]) + + @token.setter + def token(self, value): + self.bot_settings["TOKEN"] = value + self.bot_settings["EMAIL"] = None + self.bot_settings["PASSWORD"] = None + + @property + def email(self): + return os.environ.get("RED_EMAIL", self.bot_settings["EMAIL"]) + + @email.setter + def email(self, value): + self.bot_settings["EMAIL"] = value + self.bot_settings["TOKEN"] = None + + @property + def password(self): + return os.environ.get("RED_PASSWORD", self.bot_settings["PASSWORD"]) + + @password.setter + def password(self, value): + self.bot_settings["PASSWORD"] = value + + @property + def login_credentials(self): + if self.token: + return (self.token,) + elif self.email and self.password: + return (self.email, self.password) + else: + return tuple() + + @property + def prefixes(self): + return self.bot_settings["PREFIXES"] + + @prefixes.setter + def prefixes(self, value): + assert isinstance(value, list) + self.bot_settings["PREFIXES"] = value + + @property + 
def default_admin(self): + if "default" not in self.bot_settings: + self.update_old_settings() + return self.bot_settings["default"].get("ADMIN_ROLE", "") + + @default_admin.setter + def default_admin(self, value): + if "default" not in self.bot_settings: + self.update_old_settings() + self.bot_settings["default"]["ADMIN_ROLE"] = value + + @property + def default_mod(self): + if "default" not in self.bot_settings: + self.update_old_settings_v1() + return self.bot_settings["default"].get("MOD_ROLE", "") + + @default_mod.setter + def default_mod(self, value): + if "default" not in self.bot_settings: + self.update_old_settings_v1() + self.bot_settings["default"]["MOD_ROLE"] = value + + @property + def servers(self): + ret = {} + server_ids = list( + filter(lambda x: str(x).isdigit(), self.bot_settings)) + for server in server_ids: + ret.update({server: self.bot_settings[server]}) + return ret + + def get_server(self, server): + if server is None: + return self.bot_settings["default"].copy() + assert isinstance(server, discord.Server) + return self.bot_settings.get(server.id, + self.bot_settings["default"]).copy() + + def get_server_admin(self, server): + if server is None: + return self.default_admin + assert isinstance(server, discord.Server) + if server.id not in self.bot_settings: + return self.default_admin + return self.bot_settings[server.id].get("ADMIN_ROLE", "") + + def set_server_admin(self, server, value): + if server is None: + return + assert isinstance(server, discord.Server) + if server.id not in self.bot_settings: + self.add_server(server.id) + self.bot_settings[server.id]["ADMIN_ROLE"] = value + self.save_settings() + + def get_server_mod(self, server): + if server is None: + return self.default_mod + assert isinstance(server, discord.Server) + if server.id not in self.bot_settings: + return self.default_mod + return self.bot_settings[server.id].get("MOD_ROLE", "") + + def set_server_mod(self, server, value): + if server is None: + return + assert isinstance(server, discord.Server) + if server.id not in self.bot_settings: + self.add_server(server.id) + self.bot_settings[server.id]["MOD_ROLE"] = value + self.save_settings() + + def get_server_prefixes(self, server): + if server is None or server.id not in self.bot_settings: + return self.prefixes + return self.bot_settings[server.id].get("PREFIXES", []) + + def set_server_prefixes(self, server, prefixes): + if server is None: + return + assert isinstance(server, discord.Server) + if server.id not in self.bot_settings: + self.add_server(server.id) + self.bot_settings[server.id]["PREFIXES"] = prefixes + self.save_settings() + + def get_prefixes(self, server): + """Returns server's prefixes if set, otherwise global ones""" + p = self.get_server_prefixes(server) + return p if p else self.prefixes + + def add_server(self, sid): + self.bot_settings[sid] = self.bot_settings["default"].copy() + self.save_settings() diff --git a/RBXLegacyDiscordBot/launcher.py b/RBXLegacyDiscordBot/launcher.py new file mode 100644 index 0000000..47a92cf --- /dev/null +++ b/RBXLegacyDiscordBot/launcher.py @@ -0,0 +1,573 @@ +from __future__ import print_function +import os +import sys +import subprocess +try: # Older Pythons lack this + import urllib.request # We'll let them reach the Python + from importlib.util import find_spec # check anyway +except ImportError: + pass +import platform +import webbrowser +import hashlib +import argparse +import shutil +import stat +import time +try: + import pip +except ImportError: + pip = None + +REQS_DIR = "lib" 
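+# Dependencies are vendored into ./lib (install_reqs below installs them there with pip --target), so that folder goes at the front of sys.path.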
+sys.path.insert(0, REQS_DIR) +REQS_TXT = "requirements.txt" +REQS_NO_AUDIO_TXT = "requirements_no_audio.txt" +FFMPEG_BUILDS_URL = "https://ffmpeg.zeranoe.com/builds/" + +INTRO = ("==========================\n" + "Red Discord Bot - Launcher\n" + "==========================\n") + +IS_WINDOWS = os.name == "nt" +IS_MAC = sys.platform == "darwin" +IS_64BIT = platform.machine().endswith("64") +INTERACTIVE_MODE = not len(sys.argv) > 1 # CLI flags = non-interactive +PYTHON_OK = sys.version_info >= (3, 5) + +FFMPEG_FILES = { + "ffmpeg.exe" : "e0d60f7c0d27ad9d7472ddf13e78dc89", + "ffplay.exe" : "d100abe8281cbcc3e6aebe550c675e09", + "ffprobe.exe" : "0e84b782c0346a98434ed476e937764f" +} + + +def parse_cli_arguments(): + parser = argparse.ArgumentParser(description="Red - Discord Bot's launcher") + parser.add_argument("--start", "-s", + help="Starts Red", + action="store_true") + parser.add_argument("--auto-restart", + help="Autorestarts Red in case of issues", + action="store_true") + parser.add_argument("--update-red", + help="Updates Red (git)", + action="store_true") + parser.add_argument("--update-reqs", + help="Updates requirements (w/ audio)", + action="store_true") + parser.add_argument("--update-reqs-no-audio", + help="Updates requirements (w/o audio)", + action="store_true") + parser.add_argument("--repair", + help="Issues a git reset --hard", + action="store_true") + return parser.parse_args() + + +def install_reqs(audio): + remove_reqs_readonly() + interpreter = sys.executable + + if interpreter is None: + print("Python interpreter not found.") + return + + txt = REQS_TXT if audio else REQS_NO_AUDIO_TXT + + args = [ + interpreter, "-m", + "pip", "install", + "--upgrade", + "--target", REQS_DIR, + "-r", txt + ] + + if IS_MAC: # --target is a problem on Homebrew. See PR #552 + args.remove("--target") + args.remove(REQS_DIR) + + code = subprocess.call(args) + + if code == 0: + print("\nRequirements setup completed.") + else: + print("\nAn error occurred and the requirements setup might " + "not be completed. Consult the docs.\n") + + +def update_pip(): + interpreter = sys.executable + + if interpreter is None: + print("Python interpreter not found.") + return + + args = [ + interpreter, "-m", + "pip", "install", + "--upgrade", "pip" + ] + + code = subprocess.call(args) + + if code == 0: + print("\nPip has been updated.") + else: + print("\nAn error occurred and pip might not have been updated.") + + +def update_red(): + try: + code = subprocess.call(("git", "pull", "--ff-only")) + except FileNotFoundError: + print("\nError: Git not found. It's either not installed or not in " + "the PATH environment variable like requested in the guide.") + return + if code == 0: + print("\nRed has been updated") + else: + print("\nRed could not update properly. 
If this is caused by edits " + "you have made to the code you can try the repair option from " + "the Maintenance submenu") + + +def reset_red(reqs=False, data=False, cogs=False, git_reset=False): + if reqs: + try: + shutil.rmtree(REQS_DIR, onerror=remove_readonly) + print("Installed local packages have been wiped.") + except FileNotFoundError: + pass + except Exception as e: + print("An error occurred when trying to remove installed " + "requirements: {}".format(e)) + if data: + try: + shutil.rmtree("data", onerror=remove_readonly) + print("'data' folder has been wiped.") + except FileNotFoundError: + pass + except Exception as e: + print("An error occurred when trying to remove the 'data' folder: " + "{}".format(e)) + + if cogs: + try: + shutil.rmtree("cogs", onerror=remove_readonly) + print("'cogs' folder has been wiped.") + except FileNotFoundError: + pass + except Exception as e: + print("An error occurred when trying to remove the 'cogs' folder: " + "{}".format(e)) + + if git_reset: + code = subprocess.call(("git", "reset", "--hard")) + if code == 0: + print("Red has been restored to the last local commit.") + else: + print("The repair has failed.") + + +def download_ffmpeg(bitness): + clear_screen() + repo = "https://github.com/Twentysix26/Red-DiscordBot/raw/master/" + verified = [] + + if bitness == "32bit": + print("Please download 'ffmpeg 32bit static' from the page that " + "is about to open.\nOnce done, open the 'bin' folder located " + "inside the zip.\nThere should be 3 files: ffmpeg.exe, " + "ffplay.exe, ffprobe.exe.\nPut all three of them into the " + "bot's main folder.") + time.sleep(4) + webbrowser.open(FFMPEG_BUILDS_URL) + return + + for filename in FFMPEG_FILES: + if os.path.isfile(filename): + print("{} already present. Verifying integrity... " + "".format(filename), end="") + _hash = calculate_md5(filename) + if _hash == FFMPEG_FILES[filename]: + verified.append(filename) + print("Ok") + continue + else: + print("Hash mismatch. Redownloading.") + print("Downloading {}... Please wait.".format(filename)) + with urllib.request.urlopen(repo + filename) as data: + with open(filename, "wb") as f: + f.write(data.read()) + print("Download completed.") + + for filename, _hash in FFMPEG_FILES.items(): + if filename in verified: + continue + print("Verifying {}... ".format(filename), end="") + if not calculate_md5(filename) != _hash: + print("Passed.") + else: + print("Hash mismatch. Please redownload.") + + print("\nAll files have been downloaded.") + + +def verify_requirements(): + sys.path_importer_cache = {} # I don't know if the cache reset has any + basic = find_spec("discord") # side effect. Without it, the lib folder + audio = find_spec("nacl") # wouldn't be seen if it didn't exist + if not basic: # when the launcher was started + return None + elif not audio: + return False + else: + return True + + +def is_git_installed(): + try: + subprocess.call(["git", "--version"], stdout=subprocess.DEVNULL, + stdin =subprocess.DEVNULL, + stderr=subprocess.DEVNULL) + except FileNotFoundError: + return False + else: + return True + + +def requirements_menu(): + clear_screen() + while True: + print(INTRO) + print("Main requirements:\n") + print("1. Install basic + audio requirements (recommended)") + print("2. Install basic requirements") + if IS_WINDOWS: + print("\nffmpeg (required for audio):") + print("3. Install ffmpeg 32bit") + if IS_64BIT: + print("4. Install ffmpeg 64bit (recommended on Windows 64bit)") + print("\n0. 
Go back") + choice = user_choice() + if choice == "1": + install_reqs(audio=True) + wait() + elif choice == "2": + install_reqs(audio=False) + wait() + elif choice == "3" and IS_WINDOWS: + download_ffmpeg(bitness="32bit") + wait() + elif choice == "4" and (IS_WINDOWS and IS_64BIT): + download_ffmpeg(bitness="64bit") + wait() + elif choice == "0": + break + clear_screen() + + +def update_menu(): + clear_screen() + while True: + print(INTRO) + reqs = verify_requirements() + if reqs is None: + status = "No requirements installed" + elif reqs is False: + status = "Basic requirements installed (no audio)" + else: + status = "Basic + audio requirements installed" + print("Status: " + status + "\n") + print("Update:\n") + print("Red:") + print("1. Update Red + requirements (recommended)") + print("2. Update Red") + print("3. Update requirements") + print("\nOthers:") + print("4. Update pip (might require admin privileges)") + print("\n0. Go back") + choice = user_choice() + if choice == "1": + update_red() + print("Updating requirements...") + reqs = verify_requirements() + if reqs is not None: + install_reqs(audio=reqs) + else: + print("The requirements haven't been installed yet.") + wait() + elif choice == "2": + update_red() + wait() + elif choice == "3": + reqs = verify_requirements() + if reqs is not None: + install_reqs(audio=reqs) + else: + print("The requirements haven't been installed yet.") + wait() + elif choice == "4": + update_pip() + wait() + elif choice == "0": + break + clear_screen() + + +def maintenance_menu(): + clear_screen() + while True: + print(INTRO) + print("Maintenance:\n") + print("1. Repair Red (discards code changes, keeps data intact)") + print("2. Wipe 'data' folder (all settings, cogs' data...)") + print("3. Wipe 'lib' folder (all local requirements / local installed" + " python packages)") + print("4. Factory reset") + print("\n0. Go back") + choice = user_choice() + if choice == "1": + print("Any code modification you have made will be lost. Data/" + "non-default cogs will be left intact. Are you sure?") + if user_pick_yes_no(): + reset_red(git_reset=True) + wait() + elif choice == "2": + print("Are you sure? This will wipe the 'data' folder, which " + "contains all your settings and cogs' data.\nThe 'cogs' " + "folder, however, will be left intact.") + if user_pick_yes_no(): + reset_red(data=True) + wait() + elif choice == "3": + reset_red(reqs=True) + wait() + elif choice == "4": + print("Are you sure? This will wipe ALL your Red's installation " + "data.\nYou'll lose all your settings, cogs and any " + "modification you have made.\nThere is no going back.") + if user_pick_yes_no(): + reset_red(reqs=True, data=True, cogs=True, git_reset=True) + wait() + elif choice == "0": + break + clear_screen() + + +def run_red(autorestart): + interpreter = sys.executable + + if interpreter is None: # This should never happen + raise RuntimeError("Couldn't find Python's interpreter") + + if verify_requirements() is None: + print("You don't have the requirements to start Red. " + "Install them from the launcher.") + if not INTERACTIVE_MODE: + exit(1) + + cmd = (interpreter, "red.py") + + while True: + try: + code = subprocess.call(cmd) + except KeyboardInterrupt: + code = 0 + break + else: + if code == 0: + break + elif code == 26: + print("Restarting Red...") + continue + else: + if not autorestart: + break + + print("Red has been terminated. 
Exit code: %d" % code) + + if INTERACTIVE_MODE: + wait() + + +def clear_screen(): + if IS_WINDOWS: + os.system("cls") + else: + os.system("clear") + + +def wait(): + if INTERACTIVE_MODE: + input("Press enter to continue.") + + +def user_choice(): + return input("> ").lower().strip() + + +def user_pick_yes_no(): + choice = None + yes = ("yes", "y") + no = ("no", "n") + while choice not in yes and choice not in no: + choice = input("Yes/No > ").lower().strip() + return choice in yes + + +def remove_readonly(func, path, excinfo): + os.chmod(path, 0o755) + func(path) + + +def remove_reqs_readonly(): + """Workaround for issue #569""" + if not os.path.isdir(REQS_DIR): + return + os.chmod(REQS_DIR, 0o755) + for root, dirs, files in os.walk(REQS_DIR): + for d in dirs: + os.chmod(os.path.join(root, d), 0o755) + for f in files: + os.chmod(os.path.join(root, f), 0o755) + + +def calculate_md5(filename): + hash_md5 = hashlib.md5() + with open(filename, "rb") as f: + for chunk in iter(lambda: f.read(4096), b""): + hash_md5.update(chunk) + return hash_md5.hexdigest() + + +def create_fast_start_scripts(): + """Creates scripts for fast boot of Red without going + through the launcher""" + interpreter = sys.executable + if not interpreter: + return + + call = "\"{}\" launcher.py".format(interpreter) + start_red = "{} --start".format(call) + start_red_autorestart = "{} --start --auto-restart".format(call) + modified = False + + if IS_WINDOWS: + ccd = "pushd %~dp0\n" + pause = "\npause" + ext = ".bat" + else: + ccd = 'cd "$(dirname "$0")"\n' + pause = "\nread -rsp $'Press enter to continue...\\n'" + if not IS_MAC: + ext = ".sh" + else: + ext = ".command" + + start_red = ccd + start_red + pause + start_red_autorestart = ccd + start_red_autorestart + pause + + files = { + "start_red" + ext : start_red, + "start_red_autorestart" + ext : start_red_autorestart + } + + if not IS_WINDOWS: + files["start_launcher" + ext] = ccd + call + + for filename, content in files.items(): + if not os.path.isfile(filename): + print("Creating {}... (fast start scripts)".format(filename)) + modified = True + with open(filename, "w") as f: + f.write(content) + + if not IS_WINDOWS and modified: # Let's make them executable on Unix + for script in files: + st = os.stat(script) + os.chmod(script, st.st_mode | stat.S_IEXEC) + + +def main(): + print("Verifying git installation...") + has_git = is_git_installed() + is_git_installation = os.path.isdir(".git") + if IS_WINDOWS: + os.system("TITLE Red Discord Bot - Launcher") + clear_screen() + + try: + create_fast_start_scripts() + except Exception as e: + print("Failed making fast start scripts: {}\n".format(e)) + + while True: + print(INTRO) + + if not is_git_installation: + print("WARNING: It doesn't look like Red has been " + "installed with git.\nThis means that you won't " + "be able to update and some features won't be working.\n" + "A reinstallation is recommended. Follow the guide " + "properly this time:\n" + "https://twentysix26.github.io/Red-Docs/\n") + + if not has_git: + print("WARNING: Git not found. This means that it's either not " + "installed or not in the PATH environment variable like " + "requested in the guide.\n") + + print("1. Run Red /w autorestart in case of issues") + print("2. Run Red") + print("3. Update") + print("4. Install requirements") + print("5. Maintenance (repair, reset...)") + print("\n0. 
Quit") + choice = user_choice() + if choice == "1": + run_red(autorestart=True) + elif choice == "2": + run_red(autorestart=False) + elif choice == "3": + update_menu() + elif choice == "4": + requirements_menu() + elif choice == "5": + maintenance_menu() + elif choice == "0": + break + clear_screen() + +args = parse_cli_arguments() + +if __name__ == '__main__': + abspath = os.path.abspath(__file__) + dirname = os.path.dirname(abspath) + # Sets current directory to the script's + os.chdir(dirname) + if not PYTHON_OK: + print("Red needs Python 3.5 or superior. Install the required " + "version.\nPress enter to continue.") + if INTERACTIVE_MODE: + wait() + exit(1) + if pip is None: + print("Red cannot work without the pip module. Please make sure to " + "install Python without unchecking any option during the setup") + wait() + exit(1) + if args.repair: + reset_red(git_reset=True) + if args.update_red: + update_red() + if args.update_reqs: + install_reqs(audio=True) + elif args.update_reqs_no_audio: + install_reqs(audio=False) + if INTERACTIVE_MODE: + main() + elif args.start: + print("Starting Red...") + run_red(autorestart=args.auto_restart) diff --git a/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/DESCRIPTION.rst b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..23f809b --- /dev/null +++ b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,91 @@ +PyNaCl +====== + +.. image:: https://pypip.in/version/PyNaCl/badge.svg?style=flat + :target: https://pypi.python.org/pypi/PyNaCl/ + :alt: Latest Version + +.. image:: https://travis-ci.org/pyca/pynacl.svg?branch=master + :target: https://travis-ci.org/pyca/pynacl + +.. image:: https://coveralls.io/repos/pyca/pynacl/badge.svg?branch=master + :target: https://coveralls.io/r/pyca/pynacl?branch=master + +PyNaCl is a Python binding to the `Networking and Cryptography library`_, +a crypto library with the stated goal of improving usability, security and +speed. + +.. _Networking and Cryptography library: https://nacl.cr.yp.to/ + + +Installation +------------ + + +Linux +~~~~~ + +PyNaCl relies on libsodium_, a portable C library. A copy is bundled +with PyNaCl so to install you can run: + +.. code-block:: console + + $ pip install pynacl + +If you'd prefer to use one provided by your distribution you can disable +the bundled copy during install by running: + +.. code-block:: console + + $ SODIUM_INSTALL=system pip install pynacl + + +.. _libsodium: https://github.com/jedisct1/libsodium + +Mac OS X & Windows +~~~~~~~~~~~~~~~~~~ + +PyNaCl ships as a binary wheel on OS X and Windows so all dependencies +are included. Make sure you have an up-to-date pip and run: + +.. code-block:: console + + $ pip install pynacl + + +Features +-------- + +* Digital signatures +* Secret-key encryption +* Public-key encryption + + +Changes +------- + +* 1.0.1: + + * Fix an issue with absolute paths that prevented the creation of wheels. + +* 1.0: + + * PyNaCl has been ported to use the new APIs available in cffi 1.0+. + Due to this change we no longer support PyPy releases older than 2.6. + + * Python 3.2 support has been dropped. + + * Functions to convert between Ed25519 and Curve25519 keys have been added. + +* 0.3.0: + + * The low-level API (`nacl.c.*`) has been changed to match the + upstream NaCl C/C++ conventions (as well as those of other NaCl bindings). + The order of arguments and return values has changed significantly. 
To + avoid silent failures, `nacl.c` has been removed, and replaced with + `nacl.bindings` (with the new argument ordering). If you have code which + calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review + the new docstrings and update your code/imports to match the new + conventions. + + diff --git a/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/INSTALLER b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/METADATA b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/METADATA new file mode 100644 index 0000000..ab92c68 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/METADATA @@ -0,0 +1,114 @@ +Metadata-Version: 2.0 +Name: PyNaCl +Version: 1.0.1 +Summary: Python binding to the Networking and Cryptography (NaCl) library +Home-page: https://github.com/pyca/pynacl/ +Author: The PyNaCl developers +Author-email: cryptography-dev@python.org +License: Apache License 2.0 +Platform: UNKNOWN +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Requires-Dist: cffi (>=1.1.0) +Requires-Dist: six +Provides-Extra: tests +Requires-Dist: pytest; extra == 'tests' + +PyNaCl +====== + +.. image:: https://pypip.in/version/PyNaCl/badge.svg?style=flat + :target: https://pypi.python.org/pypi/PyNaCl/ + :alt: Latest Version + +.. image:: https://travis-ci.org/pyca/pynacl.svg?branch=master + :target: https://travis-ci.org/pyca/pynacl + +.. image:: https://coveralls.io/repos/pyca/pynacl/badge.svg?branch=master + :target: https://coveralls.io/r/pyca/pynacl?branch=master + +PyNaCl is a Python binding to the `Networking and Cryptography library`_, +a crypto library with the stated goal of improving usability, security and +speed. + +.. _Networking and Cryptography library: https://nacl.cr.yp.to/ + + +Installation +------------ + + +Linux +~~~~~ + +PyNaCl relies on libsodium_, a portable C library. A copy is bundled +with PyNaCl so to install you can run: + +.. code-block:: console + + $ pip install pynacl + +If you'd prefer to use one provided by your distribution you can disable +the bundled copy during install by running: + +.. code-block:: console + + $ SODIUM_INSTALL=system pip install pynacl + + +.. _libsodium: https://github.com/jedisct1/libsodium + +Mac OS X & Windows +~~~~~~~~~~~~~~~~~~ + +PyNaCl ships as a binary wheel on OS X and Windows so all dependencies +are included. Make sure you have an up-to-date pip and run: + +.. code-block:: console + + $ pip install pynacl + + +Features +-------- + +* Digital signatures +* Secret-key encryption +* Public-key encryption + + +Changes +------- + +* 1.0.1: + + * Fix an issue with absolute paths that prevented the creation of wheels. + +* 1.0: + + * PyNaCl has been ported to use the new APIs available in cffi 1.0+. + Due to this change we no longer support PyPy releases older than 2.6. + + * Python 3.2 support has been dropped. 
+ + * Functions to convert between Ed25519 and Curve25519 keys have been added. + +* 0.3.0: + + * The low-level API (`nacl.c.*`) has been changed to match the + upstream NaCl C/C++ conventions (as well as those of other NaCl bindings). + The order of arguments and return values has changed significantly. To + avoid silent failures, `nacl.c` has been removed, and replaced with + `nacl.bindings` (with the new argument ordering). If you have code which + calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review + the new docstrings and update your code/imports to match the new + conventions. + + diff --git a/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/RECORD b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/RECORD new file mode 100644 index 0000000..3bea63f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/RECORD @@ -0,0 +1,40 @@ +PyNaCl-1.0.1.dist-info/DESCRIPTION.rst,sha256=3qnL8XwzGV1BtOJoieda8zEbuLvAY0wWIroa08MdjXY,2310 +PyNaCl-1.0.1.dist-info/METADATA,sha256=05N1teI88uA_YyplqlutrzCcpWNADo_66_DDqH483Eo,3194 +PyNaCl-1.0.1.dist-info/RECORD,, +PyNaCl-1.0.1.dist-info/WHEEL,sha256=xiHTm3JxoVljPSD6nSGhq3B4VY9iUqMNXwYQ259n1PI,102 +PyNaCl-1.0.1.dist-info/metadata.json,sha256=a5yJUJb5gjQCsCaQGG4I-ZbQFD9z8LMYAstuJ8z8XZ0,1064 +PyNaCl-1.0.1.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13 +nacl/__init__.py,sha256=avK2K7KSLic4oYjD2vCcAYqKG5Ed277wMfZuwNwr2oc,1170 +nacl/_sodium.cp36-win32.pyd,sha256=XdBkQdVqquHCCaULVcrij_XAtlGq3faEgYwe96huYT4,183296 +nacl/encoding.py,sha256=tOiyIQVVpGU6A4Lzr0tMuqomhc_Aj0V_c1t56a-ZtPw,1928 +nacl/exceptions.py,sha256=cY0MvWUHpa443Qi9ZjikX2bg2zWC4ko0vChO90JEaf4,881 +nacl/hash.py,sha256=SP9wJIcs5bOg2l52JCpqe_p1BjwOA_NYdWsuHuJ9cFs,962 +nacl/public.py,sha256=BVD0UMu26mYhMrjxuKBC2xzP7YwILO0eLAKN5ZMTMK0,7519 +nacl/secret.py,sha256=704VLB1VR0FO8vuAILG2O3Idh8KYf2S3jAu_fidf5cU,4777 +nacl/signing.py,sha256=X8I0AUhA5jZH0pZi9c4uRqg9LQvCXZ1uBmGHJ1QTdmY,6661 +nacl/utils.py,sha256=E8TKyHN6g_xCOn7eB9KrmO7JIbayuLXbcpv4kWBu_rQ,1601 +nacl/bindings/__init__.py,sha256=GklZvnvt_q9Mlo9XOIG490c-y-G8CVD6gaWqyGncXtQ,3164 +nacl/bindings/crypto_box.py,sha256=k2hnwFH5nSyUBqaIYYWXpGJOlYvssC46XmIqxqH811k,5603 +nacl/bindings/crypto_hash.py,sha256=kBA-JVoRt9WkkFcF--prKz5BnWyHzLmif-PLt7FM4MU,1942 +nacl/bindings/crypto_scalarmult.py,sha256=VHTiWhkhbXxUiok9SyFxHzgEhBBYMZ7kuPpNZtW9nGs,1579 +nacl/bindings/crypto_secretbox.py,sha256=Q_E3fpCfhyvaKkb7ndwRfaHrMZk0aTDWUGWpguEUXeA,2641 +nacl/bindings/crypto_sign.py,sha256=fD_346rtF3CCtlXkjy30A-HGkXY1Z8GOSRv_0kUTsFg,4857 +nacl/bindings/randombytes.py,sha256=eThts6s-9xBXOl3GNzT57fV1dZUhzPjjAmAVIUHfcrc,988 +nacl/bindings/sodium_core.py,sha256=8B6CPFXlkmzPCRJ_Asvc-KFS5yaqe6OCYqsL5xOsvgE,950 +PyNaCl-1.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +nacl/bindings/__pycache__/crypto_box.cpython-36.pyc,, +nacl/bindings/__pycache__/crypto_hash.cpython-36.pyc,, +nacl/bindings/__pycache__/crypto_scalarmult.cpython-36.pyc,, +nacl/bindings/__pycache__/crypto_secretbox.cpython-36.pyc,, +nacl/bindings/__pycache__/crypto_sign.cpython-36.pyc,, +nacl/bindings/__pycache__/randombytes.cpython-36.pyc,, +nacl/bindings/__pycache__/sodium_core.cpython-36.pyc,, +nacl/bindings/__pycache__/__init__.cpython-36.pyc,, +nacl/__pycache__/encoding.cpython-36.pyc,, +nacl/__pycache__/exceptions.cpython-36.pyc,, +nacl/__pycache__/hash.cpython-36.pyc,, +nacl/__pycache__/public.cpython-36.pyc,, +nacl/__pycache__/secret.cpython-36.pyc,, +nacl/__pycache__/signing.cpython-36.pyc,, 
+nacl/__pycache__/utils.cpython-36.pyc,, +nacl/__pycache__/__init__.cpython-36.pyc,, diff --git a/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/WHEEL b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/WHEEL new file mode 100644 index 0000000..7872c33 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp36-cp36m-win32 + diff --git a/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/top_level.txt b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/top_level.txt new file mode 100644 index 0000000..f52507f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/PyNaCl-1.0.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_sodium +nacl diff --git a/RBXLegacyDiscordBot/lib/_cffi_backend.cp36-win32.pyd b/RBXLegacyDiscordBot/lib/_cffi_backend.cp36-win32.pyd new file mode 100644 index 0000000000000000000000000000000000000000..38f5f006c7be85738018d8931b51bc93a7ae4f28 GIT binary patch literal 128512 zcmeFa4}28W)jvF&og@=jm_;@kG0LioMu{2?R3JeU$Uo)Z#=r)|B-jGKZKR4YE1`xM z+-%KoU2C7(KDM<_?)zV>?*4}NYj@v!?|qv0tINErLic*Vey_J=UX}M7 z_bpv^{isnRd{)+tN4lI-wmssvuN!xI{FCthyN|zBISuzel+O0k{lCT)`3re?OXYNa zf4K5Seh*jXNLYeag0P`9H?}jgP<0;okW8k;W;5&nM1*S{8|GSAmS zf2kzRcBD$rT>17w8|Ijla`{NdXi3UJ!#@(ChsWaX##MwcdZOnPNlK$P{MmQOk3d2) z{!%zF011|U_+8xGvmDY=glf2VQnWWk>ZkkAzq}M_=_QhMpxhy44~9?XJ#HRa=630vXX$2Y60sBAMZCq zZlnn*^=75f}_s6>HoeIxf@mLE18nwNgY z&`)Z9tf)KB_#4Q#JJr~aoBDFBdps(Osv23q&Z2I2c%55PzZXM{-?#{Lw5VpMOR9GV zAHUt!f>pF&HMh^07E&cdcj@Ft#g_oU?!qeq54!5MG*QXn= zr%O^tD!mg`8n*+qC8jMiIu%I@-aSro8!v(>7&QbY`~H}PfTt;-UIaWk)8P)b;>V~N ziBcCuVxen{PtZ(c;tjS?Y%|49<*}k58Y?#)L0vjZ&zuDUjS>X398n{)>E(K4%zDGO zNZMrqBS$nv8*7GK^(nvu`wH7+UBNAc zkrjy9NMMoJLvntsdtSvRdNVFW4O>f(4*g99(bZ8^OW{Q@|YLxC9(WljTs4{+w zj9IT9Iv(j$TejncVycY8e+N<8!9PVX(udNkjQ!}~MFQl+B@U62q9#WALYE=jj~o$> zr$CkQU1TDjh(i*X9O={Kh#4ALWiZMqVrr>j(;2D|S$=h(RBf^dn#Swb?PGI#7=}k) zFpen$QRogL$^g2O#u!^>ok&betxs9>)-{`q(+;;j>Nx#o9;4q&kJ9ht!}L4-UHmS8G*kMVZlg}} zw@VN3Tk2kZo6yN`*X_dXsc)zDY1L+_EJXH2srbqbyxl0(`^ zk(*9}^g2=v{8GI;QhV@3qpl;h-+HQ-7v2+-I-1YmorZ~cXPk%Vvs3%=-hNDCqcAtQ z={@)i4XzJ?CE`?^wI~5rQRwVce^cyunocGs#AY=+{T z;VS4{Afjd8{gunjT=|;L|VGB79#-M>n^-+am2xl_9KUu*dp+(IOiGQOO&-y3kc#- z#sWNae*$1*7M_eUkrGWY_JKZ7N*1RKPC0Wtej>YviAEN9%uu!&$};{8utke%;>Le) zc+C}bp}s~p1;>rkJWy>haHsnGk%GI9;g{2#B8Ad-r}~Kf8LuH*V>e<@FZreza@;Cj zMj+iUC|!5g76(UjxW^S>AqC?pHGI|)YBa7uG$`#_5c*Z3{5nd2Amr3t9n7LC%|_6v z!;8kn&c?b+HqyKh4V|nG9~-}eYFzJKG_f<>X};5$pGt9h=}En`UoSoL?6c4Ap!6aD zNf5ThfzY~I>h=0mgT#-$^-8dp*iHlSFR-1qId@9Z@~P+$$sJm!%>+ znEC{`AdPy3(6-_G)4menW)X{faQ$**jA#WM+5?!rxVU4Ev0W~SqD2;>IhGhOLKq5vh}lqir!$NhJl${uu6v|Fc0amq5m0+c?9_oPOBLQnl-D+} zKoNYn!s<9rc-cwdr(fb(2da0>{bSK9V7;bU(4mcr?A9_)9oR(Ut9yC0^d#5{U{>^Av`7g&bC7y+Str zgl6%#6uiaTa!4ow#s%N23V`$d1c`zBR5nL>>ze$&DCbAPy`YnTU!Ob$c^Xd;o$PYy zkw?+pdbE{(zt=*)KW?VqUpLb4YYq5a{^<9f;kW3M{Ptsw-y$pd?b#Ll_VXZaPklS` z6mG0k)+0~gAxaNVlnL5*0xL)TN_zb0(zlj?(9eE$XE& znVCgENFchY~gaezReZP5sHS&x(-3y-3xl>9w`YW-456J7UimHMCTv&DM4zDCyPDSEYw zmA=d790T83|LNE7U3MEQJv#YbZP|*EovifmyiaNM4Hskz;;Xjlf? 
zOk^>k_f}J#9!qmb+LhCnq@_q&_Vkr>%be~{b4uE11mc#CK&KQM!=@LZI^q2eoj-J^ zSpn-#rn6F1<~XXs8Kcp7Q+@iv{5_)7XwAF2Q(rCXvlP7wH9!0uaYY@>s-*(>H!GNhH-PkLXfOE zA;^DM@ZXjESL448{Pzj|`#k^M!hbvY?=$>&7ypg%-&gr>8~=R?znD_)(?+666-h=4iUC9X+A;ceCIvkX2 z4ZBmREXRU+DYz04!T!G@ugsOjHbPr2r{b=2BfuYQ07Sri&nhXMO2Wvpv8A^B4c+7q zwhn^&5Mk`nY>Y%_!5SVXHoqhnF zqUB)AFBIrf9Qc)p@t4f%#sg_@EX zLzO`rKInA+h8a8(Yq;2$Dmx%uL}E3I)rwE+*zVf+V%}pl;-+p-JxjESdn&S|yNm7{ zy~r!*!6PaEUU25{F3&LfCz-Fu!x?e@>zG2ol}Q=Nj1_Y_rvcGMozWn(l`fXWDwOCW z<_DMtL;cgwB0_y+HGccb@*La7p}lPv<3}ys*?ug=&%3qK=)BNDvnqR&^IX=iuj`4J zYRgwC*F#%3e+GWNW-6Y0N#mQ1P%|wM^r1!7W+qX9)$unWre0$205d^Mi5M#2!c@JB z5#bMZ4ol#m0!mM^H`#)IR-~{ya>B7ff3$3=-}ot_XmG~V)+4BIa4!-kcmP-&WxEp* z?9FJUV}qj@;_vB+Z$yhzjhakM*JqID`}(^6iSI0be1ju;o1?X4gbA3>w!Mpt*oh4( zGkA(a^AKO|D8~!rvy<$N_AY4b{hvL{-Uxr_{OlRFt`}{jLB-qHa0d(Z1gfkcuvX4o zDn4bqqsxxbtOp_LE#tMn2-PM*E7_u>vk3uJ!5nHza(^eMU2mmLnxex!lhmBlE#(*~ zlR8R#AT?-TB|fN${O+>u+`@@)554QNeOc<55??j~axH*7x|^9t*33Z3UZYm38*AsdMtb&_P{Yd9A_ z(mf`4bReF}I}Az3%M?BPns6FM6DyMgtSHMoKv^Eq%1I7lMe=l_eQj)zZhkYfpQ8>$ zsa>`HK{@AB6oq<+E)93c;n!uVY@`lSZf#8h1BfqQqJU; z&4+uE5 zI<{Orl%_Yq#8g4?B|ev4E9+$nh|B{T&6?Vzgwl5q=jg65jwlWZS$oXPC75#9T!s0S zg4-2s6d3ml0Ac+Y#cuNf7kdYSs&N$S2^cbDKQ=QP_z(c5wCl`CuQh@xnO_MmMH6A^ z%shs&Q8T}BaTkWc19H7oWqkBEvW8`giY7HCcW7g77~GhSGS1*cgUkoeG8wsBSeh&A z3fQecA(+o;qk}}k!;vGQw7Td_d2t+bWad-E3j~u0jWRPA;x$^9Qd@`EDl@Z<(=j-~ za56X&O0PvW&c#sHIEpYg6)C1SIc69~sU?13clTaQ=CH^s z0Y82{`F(VA@n?UqXgit+zF=m0cXL0Vl^B1h zBuXI4Hq};IqH6z>?0hh<`61;RHA3r>=WHq~SE93WSygU(zv4IUzJ+>)i1motG6re* zvPU+1kG+Xz`ivXDMWp3ICpn4#&62AULlJ!a7**ph_yMtcfr;_Mnqy|t>{Yk8Ml`BU zM!)i2N4-I#YRjWY4N|})V`esTl)5Oha5@z?Pp-C$841+vPj#zw#PBCAUhJY3Km+Utme>Z)0T z2g4DnNL_s6Qsfsv%tb*T<)5i>kx|}6;h2?5{h)fHYfG^S-754M;vQNK#uvc>#KOcm zIpErnVx-hkfskbD;#EXXon??(+Ko4)r&XE>bgwefIIA&oysMpK^pB}w5fU@;1T#9!&h zEe8=?mf~-@1O{ei@;4vexHyHk65k~Ija~?_2sM)E#gCT?`(*`Q{QM>O?(3Kpo<^bO zOTJbFA!atkSg&TLk9TMVx;Py(NqAQwOeiaSCC%%uk+e%UPC{((B}xtsQOH3RfYlHj zBr4zT(rX;y(`M)bP%oC;B8#BQ=f`+-bUmpAmOelWLL}i-u}euB3{*#AO~D_8KXOJ3 zQ`*gxsOvSVRHgY&V(iiqTYg`}Y?=&4jBcRO5cVCRY@tuUm+M(LrjyOXKn;{nLEI#y@UcP% zgYoP>WzLu2O`mlJ5FTJQ(?;lp_QORMX0!B>eeGE!MZp#CMSz5SRR-t7W{I^52;&;&0_ zSGQN7yZyYo-&S9)xS;(bWh6a}#X~se8ToccSw9u5U)cQl80{)`y8>`d!~~UMgFE>{ zI40|XGu`POWmpqKnzEB5KEZ=z5iCg)n3Y49*n=jNW=-I=i)}WBMpqlZ=;nX{(Zv*NQbAdI=~8`85Ir{^ZwpNa(xjMDU9Dca60?y$X9ee| zT&p9milX2#izN#S4os|qC0jE(iEiw6g%xLE`4WWdES8~oj(`DEzg@;KPY8w_? 
zzFg^`_U*(&iu&?xPX0na?eV=|7GD{aeSV>P^uLI?0*a?CPmB$r~r5D5WCH7e1S}02KybKv}>)& zJE7h{-c^80H#{+@wD4L*(k8KDS3xQXR@yaGTTGcA|Hesp$8!Cvs5HD`gh%@gMApn?S` z;~1Q8z$@G6#lP_-v@8%jOi057YxO1<&tS|z6WAfY@fcVwS`LBJ?zjN^6hh%0cbJ(u zqO9_Bm!&0_Wl8JE$sc@_3LK1WkWej}Q%ke~JtH0Xpv*a`R0Y@x_5pg#2Wgl3i)T_a zEE5;flLlk8du(#MjU&9>AWavPzWP(30@5&Vx1k|#?&%(csh35Ht%Rw{q=G?Sp5T&U zg?k`6S}G6vAN?y+C!?`kF;)Pd+{Ga*q$zQd(=jr+0BNy`)H;eF3WXF;#Fs4HUXMOe z*!zCtc9Lf-MekxE+m}POx9mA2$x<%EB3P5m_N_N7{sz16NaI;7Ue$8IDq|6zNt@KB zhabec%hym4rf1^>SP_?{A7HaU493CVgBH?E)(>qU$&9eLmUZxq)|+J8(6ZK>v*>jqP46$_Er07~~H^t%<;s*1< z9qfZac+*Of1pljHy5Z)Y%%^GOg6An%4Z3-_%EOhSB*f9M!+=Tm3*xIo>5JphnYsE} zw_dDJY@L3;%XHT2i#&R97HbzBo_`SNlDkqNxbX@GFkQN;VMcKU%r#aQQXOKUF@7+R z{RRUhXc$EWL69COu9t}BKfOgXe?Zi&tBtR3ka>?~)UnRmt~%yUkcJtggqp> zQ`=%l?-yC^va%9`+QscO7UMl{{nSV~x<=ITPh!|(Zi|qa8zwdyd9YuSx{Vq;QM0Wc z6jJs0hOB89hsL!W(Jt7q&@pY1BQ!c3O9{89v>XW?hdWp5C9!ZP=9EyTP_!1a{o9Zu z=9jL6xNpryCT!wl3 zl-gIUEC^Jyl~7y&?uT0Bp_vMepcY!ZE*&{gadL(g`kpku?kFpk$=GK>MDySRF& z#215OiPCL<({N9A75jkg3{?BsgJez5f}omh*_epeQWuf+y$7S0?TaR@!f^XhCCn=r z?^(u=Z$&&ti7RqY)qUs+R$hqgV9-%pz(1w%I%O%Vo&ucsnl9V;JgOTkd|}YI2mcm6 z5is~JRMf|wrBpF8_TjnV2P;n*eG{(Z~+e)BDwgK9fTh+)Zyn(#DGlB7J@HaQ<3wSJ8 zq^!$8dA_dYR2nR=Xr82go6M5|b2WP0p0EX5YPDLLkTtB9vJ+4ZoNHwD<+)0<_;2C< z%hX4{fjrcgo26T!nEE1zZ;`NaS_uaFra^TB<*?o^atf%&DguQ*lL-ABSg%(^sE}U> z_Bi#$e&ZVxsg+%a=nV$r{p=!cy8Ewd@ot;Z6lBH9(Pa-!h?eL1`J8{0sy}PkReU|Bx~l6kUO5+44p{b~ zp1h7J$#q;xFy&tKim#wky9QFz1Pr0eE2*|w!5m^ogK7)YYu5o#63PKH9+yuj}7=ngK+?G&6uGB4`-CktBX+k_vU-9dp^?97-19y%xhlDHG6u@j?yHWLx%|fRcP$ziHp)TnxggW503m|&$bV%2 zYZ%XVTOG9VD7aH)aK#`Rb#XC(Jd~a)l-zgXT8ZmhxPFGKe7fXL0cE~}dpYh~aY-2i ze~yfl3}=R%A!Vdyq~V^Pp>Y59p2DQ{ZML(c)IjT{0M?K5`yj|E>@+%@JX_WsgM}H> z4K0+=P=ebL@gg|b$ZWGCG>R3uSP_=)+@{p+A#43RoKg;74P;uN{nW8PM90GT%{rP3 z!>X2EL^s$l+oN)X6n^6(h{Vl>uc-R69_?zl@UVBPv9NADj(jZAa1m+D=_%Lc7t07T z^Z}N#qjNghx_+$3u$mq$g}bW*RqB}1POQ$99yYH0CM5$YYCvB`XsEjSJZ z8*W1O>NHlxOFNx!gk#yxc6+pdlgj#neux|})ROhPOD95^a}tHBV^ZoRlr_WMF+-?! z#j-v_hU>mQLt&2S?OA#XDyiS@s@CW1rMea&29AQg`V0>u0#(dytXpft<;RjkzhQ-1Gb(6VPSRB+BF{_`G*Apn-AXIK~+ zLwIFnF8%D?3;nSgOsF6=2$5fnJWSo$r1X_5UJX|NZYLT5;fW;p(APoO1!eBg-I(!e zgy;pf&{xDk_W*RRi`|Cd;bCKpW!M0a5H0fixK|2{D`LL0CP>&d(uVaIhve=n0^!Ip zV6LJpFQ6=Gyg^k=foD`bS$1gpq;eG0DaA}5CQ*@OF-Rn0Z?hRzYc+1KpOM^x+gD z%rvao>1&jZa$$E&$v08g#e-G1VU^|d^udB^_Gk8>8@8*tXz@$b;$Gu;7mAzdF%PhQ zW6Gate$Tv%ixP4z04JEMq017B@&^z9V(8>UHln(k#qkB^YFD)}ZYK>n7X^s&szqV5 z2MSx0RM?w%9$eU!|F|#^GYkP#QqevtNx^FZoft143@+-LDJaR z=cJ|bKCx&>#=wu8PBWe4R=teYtvz~^OD}`MEAr33y# zV!eyanlf>>AM2$R+h}*7@#{$-hXd%w$iW5@3*FI4#N{P|JAo-Ie9Vzw@}jaiLnARg zn^i3(<1qN@T?2Ay$ZLz3p$iEa&_?8f24x+$fOV|_APr8T3qfATN-HTSUDsYp>LV?c z;SK9sF3VKr@&|oKAWzT#IT}f8pnM%vtTEBjGPy6qdl>vJi`BXznn5K^=v-+q$|72? 
zbuVDok=cdJGw|Jc&*B|wvl?-MdI##1#dZCyJdT#8)>@E3+lfTev7YixcWB?Dds&WB;Bu7)gh)Wg9^Xgq+yuB`T>-Xnkm z*_=Qr$+3$xS*%t25e5mYqMG7h2k@ffXql7Seqk;_qhvUk zXJI*~r^DtH7O;sUX~;5P!H6i9$bheZI0YabyO##Mf-L0*e8A>tL8lX}gA2^;=q z0x5L~DBZQC3=5BkpfN%RYg)xE!yapWjV!rqr!ZH#v%h(&Pb;oOE`lRzU&XZRXrnp@ z;O}f|Ek1OuAPSo@lRd@Eh8t;^iZW+HPD7171t_uchd`(gf(Wb{^-i0*(btfZuppS< zhh)Am@5Bn(Z@UqYO~K8QYzo~e`F+&aG)pCQHfI8)W908O{JTk6$>!_7B(&5Ztk4P* zJYK8^EHTKQD0(^@l>_w&MnIHKYXTl0a#XPXVq8MjPk|7u3fK;>7bB4@v{pA|1Ey&i z(?CcA*8aVFiIk8_g#1S=W221eE~e1>%QWcY3%QN2(bWj(9Igt+-2x*W667F#epAOka#EOgT(+b(A-SlHU1)rqF`eV=Yu`vma znq*@Pdazh=aC5gDpv_Z1(pZo|Q!CLdG)L_iXI^&kU) zS6fRVqX_xGxSP+=-D4nJ1wG*22`L*xoLicc2#~}T%4J|O#}l$tAR}RxgMh}?xbgUT-0K3 zaLYLx?BYUWFnB7dUG~6AV)>$7T$)FF=0p4*wGvFTt8zAM%4pkk4R6AiJ~P+ip(K=& zBnTFd0S;d{{Q*};8JG1;M?jwoUU)$8LQ-&(PzHKHd$BxKg-nS6&|Mn&fCtafVr$yu zJx|M8o~nAMaVNBVn}^XfnhL%O2w1^U)stAa+2Fv|;w!E7RCRmI4>HA)0>}lVGO`%U z#|nXjjzbrS1{@pG%D8v(3GH(Duc+HgMw>`sL1VdDOQY1?f27|Hjs=C+h9>sS%<*ay zUIXuh$4Ar?L`Qm@WCeN*A>4*~New*Ts9yP>V7B>f;bQGjVUBh%TI%TD50c4d4Y{!~(2LMg|D!2#r(uK|-@twd*%W9%-D*mL=D(JUj1P0(m1 z482{D$do`dloy?wXziy_`9j(kpZP9wBXlZ+N=Q44urd_Njh5#5qYt_vEu$K24!uwO z)Y%RS^%xeZyqfmA5$ucQ)s7F6Yb0%s&a1L1zhx=T0q<0aVn-ylYC zvxEp}*_zpdF1XIR*FPlGkx`g2-vk1cu8DtumMSbfvQbNGPhfLcM-HV!f(JmH%~S#? zAT+A4IOkaC-e50E=pKoe-q849HmHghoK$9Qoh`g=ryeZW-82^LLiqE$n%kIz$&5 zCUVgNmS_s_SIPaq$dAiLUCO#~0(DOhLmeiP1umjX42zj?8nRHM9mgg$PMk-or-qcK z>!a)3!`hW!JhB8HM z0?%P8xM|2cRrYC@2z&*H;tRtmE@q0%Y|IUQ)ZAvFzIr$pkyK0*MwT^USp0MIzrsJk z-&iKCwR1rqiVenIHf$gTJK0>1TG9blfu-VtH`J0&SUK1OF0dna{+@zV63sz+*}TEn zH0uwRU>P&$6>XjbFw!Uz(;~4(^8FX{8ZqsCK=my!9ShEN9H`EQgPn_P>pz9^j^J}l zUo>9r`8hEU4Jz4(QX!WO=rPvvKkrIGdY$MfyHp4^oxv?=i*b`El!hoV2J6o*FF=zY z_%D8~E>3Q)7b^t=CE}P0?g7B(CTeDNqQF0(cJMz#^E-`pomYc{+Qo_a408kaK+`5F zQB5aS-Y{&GuD^U@nh8+;!1E&bbc1~6IDCj3PC5-QoaC{Nbpkxt5_RHxDI07hn9kRp z=S=5I&t=p3%JZ$$Y2jI$0uzkL@M&ad57F_g7?RD}bt9jK`X-rn31UoeoqZ7yqF@7W87$~DJ=L*&R(U@f zgEHO>f0&|;!DJ)SkOV|f;g4l42Q3x7iOr|vNHALTW;*Z9PzEgG0yuK8Z7yV(*Iq*B zZF?0zYKalq(>4!bTlezU1Cb+be!OC=H)ER6#xzfpLaEJD(Fv~{1w?o1E0u^ws5Szj z+m9nGl)C*6grO2xJm7q+GH^%Lf?zh{2DAV#ssP;>JeluTb0^vJ1MIi@oZf2oMRX1K z$4#g0R;MSPU2mQQrXNUen}h7TSAhIr`^G|=v%`223n^?@{vIp;mIruI-N| zUtH*IR0h82#yPdn!yv1b&`&S)-DY|-h+K94xZ@{Q~oeb9} z!E3Puu)IUQrB?Er_xpm+Qv(LkIJ}&q%Z?|^(I?x4IohT~ai{hj@5d}r#eN%LpFwYh zllEx{GIsqe>Fra#4$9``Va<6>iSfNFh}d9@ZYV(*JK#LHVfNcf^XZY=mCdKeYnN38 zi-y$)V~#2Vi#R(FotJC0t7PaA0d-4Z>HIwwIK%x*?S<2x=R}tzL>UV>zi;YlrRkT^ z3i4ERM;~;7r+^KsMjlK`U??Olf_r~Aq3qE~K?$+qpj{-OtjA=GkP1V-imdrbP(93{ z7%nhqa0_v?DHrY_`watfIELAU!^{-i&a5|)!Oz<9(NPAM4%iHY=-yG5)e7q9D9hdg zX<~Ut8CXWL@Q#WMv-d9{p?bjHKZTVo9UOucfZ*T|tN?%wB&Ql!0pRvW*mj<#xfo_h z67|lpV+)3*^Mt$48Ln;*X)U{k_rdv*Z`X>C zZ_5P4_oK280@bHxqGO6AwPh3Ld-k9wIuj(Wufy>Z$D_A9>;O5Z8_lK11Z4xm1XutlSlK{SCm<#C@!>)f1Il zu?l9c%z}S7_=s~Yxsm}i!M#^;w%*@S+Cz!L-^`NK#~7YamiltLJ_pBv^kO)eVPlM@ zhK&?8GL}+cuTN<&*hl+mte|&df$UNEuE4ng{QzHkZ450k`n?dJBD6^c5tU}B*Kgh& zJj`oLDnBXwF}39eUV$EHS)dhN{>}{S^4Ayi6!^2$N6EI!4&aEJ{5_jfb~K|}YU%#X zvgQo$Q7X+A|~XeEKznel5p@ zD?P{tHbK@4%hI6LIyJHdkZ7+y(*vtbblG7V@_cpf55E%ZkcdjY<-tmi`z!AVzDiBGIT*vuAM9m#8N`o$!{~QNc6V6WQKQTqw4j6b zBpFC(W3-gqhRdx4#OMcFHqr{^Y>oIClOM)vGNhWsYI2+wpb^s*g`Fpv{fYT5XJEbq zOC}dh&>2w^%l=l>^DP|*;4Cs7&@9`Vz^(pEfm`+EH?cV|-0x8z zIR=IYe~*=4BnbGB5!DdM|tnQX!O>2bLjj3y<9p~={;ak%QrWXyBo^gt7{m2eBy2gTjU#^} zkj`z6=Ff9Cj8UJ6IeS|UhCV?oCJ63QPEE1)&re1+b}(58^f||1Bp8SGW1lS*VIu%> zT?z#}h|^>Y^d0urHLYVcC!@t!TZa=s&%}1U^r$Ugr04e)=4zv0Qp(s+a*Ly^FgNrW z#L~=rhspDj3w?J6TM9n9%9`myO>_&=}%#)B|d0SE_c^_Q~+hV$$UeSx7n zu$4id(_ipTXP6Y+?Ar;;1BNl=}@NS!a!0ZQ>cU_PSkZZ0y;{`=}f}5 zl2Y=%qSbgJ8lsI-16}euZJ=+uk(}G$asuDA1t*KpZP-_5Tu9Xyb|$S6ZWuFlY-&NW 
zV+Fr&e2-9HW3WSJ=rBg&tob(HhVH5y#vf^_APZqNRLm9)xSH;k+&y2F+zxQaY+O=??PWwhMj7xi z%E(ZXI5wAGvIh7y*1rSQ;m^&`%^R0M)C4og+E#=ES+^EqCNmyej^l7dxHiqiTyiwQ zv)_AORE*7pB*NEvaRLSC5W9ql@t{k=d&Xf;NiUrbF_7-ZRyr(7VQ`l#n-y!{{mob- zhQ_nC89mZggderEo$UkT9X$FJW9>oo@Vb*wk+#l8B_W_}E5ThYd7E_xjC)7~3lXVr zosUqqdxLx07F|BgIBl+N9Z(djU4HNWp1bdjeM-ao_XMB4ZjT#y<+fg5^ zj=4iuK=RVdysR8c_Hf0^nLy$M4jaL$QIQ0>QZLJaGrN;j%lf3pK|93Nzd)AZhg@gX z$fvYH3idW^3rB?h5xsJRfdzS5spkg}rTw>s;HoKXKN!MI$M7zOFwTFp`BRR0Gp*Wf zU4gc?E#+u{sY=7cU}1tQcLSMUWM_^JfZNS&px#r^E+8O;RfAjc3mn%Mxk1X7Loo%8gO%gFLd>WS&ryTNAqc)mfd_h zuQ9dx^wg#cCiZVoqD79@+36-6$UlQ$2-`pL4SWCEhLq?XIG<$+xg$fP76%A2W{H z@}C%i;Vy@^BHW)6B1cyjL{+7dwk=}|U@(Yg`Hj(=iScH2lS8YEthaOzHt@I;KczUb z>1O$HY(sMb4+$Jl`)B(WVhy<;u-x!q6$4$TG2@||r=t$dtMlMho4JB(Hj15~V@QI? zo;6plN}rCA^w64d0JrRuHu#ekIOX&xY1XrSOEKBkqUd#2l!ZUqU)2{HZM<|h1T^{A zB(q@?_t_iPT+Cd*j-e7zuSXkyTskNghinEbK;JeCNLWND=~+ZRdA`WMBeMV8k$m_E zf;|+Q82+@)YEb{e-wBq=Pm^3}(qZQAG#AIA#b2hn)73F)s=M5+jw!sdYy1Mr0CZlsKuyYriarH6hdrcF+=|4lgl>EchI_2ZY!{hvFc9GzI)kN? zvVtibH-#8>38hxotOYIE0e`Ln|u*oA{WUt?jb3ch0mt0>VeKTZ^}9C7HnSO zdBP{1C%ou9;U#tFj=-uas9?>roRH&tH&TyNvtpsK#tU_rjxEI}6-dPy1)avVU*!UA z?m>;b?g3h%y{_aROf8`g=yzzfLhe?Vn;e1@jNP9uim@b-Ny zpdf1;Jh+M0p^Xoo6SHC{jXJh29tiet;vP@-Q3u1lm}p5&kjWi5;+AC?6VQJc+3~%V zcfc4e!#E8U20A*XQIdKGNXq6H*$@bcPoP72Z^E}1Mtm%&P#u{osgkxn57p&3yjBzNm+rSc2zIoj!#N;0d_ou3l(AER2XJhli z-uoqTrd$Po-|p)ngSNI)qsW-mOIR9gc@yW#urpEs zH9QT?R{oPWn`Asp*P+aVHqJSOHv+egLdWSj9e!rLAp`F_AwRpGl&ZfL+h**`LL9l59 z?>Esq%pi9B74%NlObh-3dWYS@eow(WydIJ=iP@jfsL5qdzX*P`H)yeE? zB#>I{Dg_xDw^3bi>XH@f=&Gey9L)5NG5Qq}l(n1K=x89zZ~XNO;9g|4cI-9|Fz~fNjDX$l8elv@zQX0b&#+0Sr|7S@e@#Qu+EX=+N&_Zn(*% zz=0k$a}=cBg~XwYp#v7AE}_TK7|y)>j4pmaOHh=56w6x{5wf(X>+e9HG;u$};U91m zXeF214;|~jpwoe1Ps#7pUftFr5^hRLxX*YUZvz$I3=retB!Gd6KTpP~1d3i2c~uX9 z@+yu5Dpn&5=8=8IpTRF-eBEcz--nKh8@j}QRp;}-Oz z1Z~&&Ar4F*ow1sCuK@0`fa{Wh@8G}~Kz8%^**cDGCoSNc2slv>ABVnvAnUORKt0Y_ z&@w^W^*9B(scXB`0v;L41E9Q7bjT6J(ZC}Ni?41JE7N8GyE}QJtE@yW97sZtj%{w9 zFf>ji3SEFYDd0)2aX1yb_qgryXug|5VbL^w4&{8xGPUzf9#e_rfkMQTf z6K$DCgI8D$?gRT6pdEwWY&;#oOI_Oy14-XN0Fr7}(pLs0{dscIr+LzGR#J@KZ;Paj zR?=nxEJp8Y4ou=CjRnxxR|Rm31zbhI3Gy!G&@eS~=rl|em?a(sD4ohT zH8PHag13r}Rv+e*@Al#mU`^rv>(xh2(|qu9vmqE$CudBf)yPK(rd*!lxx7nnH2*x! zUtj01NkU2D1ZGqC5+Cv$f^aOoR8bJcwAsY2h6bYLl20u zL-*FTT>}7~LFn#ny>?hkg z?MRLkr(1TXy8o%aGn&Uv$lop_ zT~1F;|EK=atl66T%gc9|_j|G3O`N=V4|N3`11$&`a46mfr>SiuzZ$d78K2E=bBUcu zM61KoXR+I4k^jiS@#G&%-c^n83jA~a5jy@KiX{#mY+A_q8On!4IPK>4*@w05R$q9X zp&X&2u#|%It zOe{;Na_~kYRqnt~P_;MBZp3=pKUTtTI`!;UI@$_OjvsV8SrduQ_knFEmG!_Abjkr$ zz!EjN0?L)zQb0nHZF1+3?--P9FX|?dolcD`A+EU3xI$z$9Vbx3p=vr#)s{jdahx`? 
zGtww7J`36V;2Lag^F$;2uhqs*=jyX-x`)!GhWi(X{uc{m!8hDh)lL+i=;fj)d>^gw$lyat~+nhD_y@3Tpp31s1zwCF+yIvq#B;h;4gP7PTs z05^;cDBZ)-3Mv#eOfBk=H;_#OyRG%6ES$F0dLEVL?XKhHkn>sWzcwWx?I9QB05U8(q<0hZ@Rg}c(U zQNTyIb4>S0mV%W${^Hc-SIW_oxuI;dX9a{Vw3o6B{fg})%}z3^5s0eL4xaWuDB-tM zJ+}=huED;%ea19I5+R_Q(6wDDK&nlO=d&yH6EVV6#9jV!ztph6X?Fk|?phxB<7Y|Q z4PRD}0#P!lAXIa)8aYPvc`4smb6pEYED)}~ERJo&L1gH+GVVUJk9y9y3o;l-bdA~) z1v0MVx}p{~K($kmk@*fBN~nV*GCTX}Y~J_a&;l;tgkIP`b+*r^Mt+Wj*l<1N3eB}1 z<^uB}^;^G3iZ82u&fwa!G|sW0NTU>d@$PQHiz=r`As}8hw63n0zMPH^7OCJ(UJN*lddxJrasPvo`-iyx9oH{#NzVT-|2dKx=SWFO zNE`gT0X7<*{sqP7pF)cV<`gg!2<~$|x@SH4kIu~+yxOsM@A|dEq)^@5b{t)yIg- z6xOLNJLv9HTYf`#ujVOqX>Pdux(Y{W3fzC`@K)!H?pLZdjEl}ZP?)i%92Jm}tzM~g z6!SeB8)i5P8vwX4l#URZZs8KEm#%630NriHTToQ5(1r%Wru3G&F1qy%iA3!jzo)uR*`0RhV+ z(s>4{p^uQkQjx*?yei0HUma4CkV6U6LOTGmLV&y^Y6F;`5e#WTBJ&?n<{D-G9g3h` z!4#2-6Gv0FH}b@4WHn|Ms|++~M%8n-m%$NkO33YUs5C?h{ z0s~s{Gyx&a&+%%bL5=(bZ`2GjxQZ52p_|W9D49&dXQ7m@;Q1A#ZUK-QsiikMVdEwK zI?u}Q0xLhO-(E!|?>Cb3crI62x$NS}vg}+s`D=!i3kh_!@yD06z|M(lbVPFBgliVA z2XOrqS0}FNpvx$(gMg7nj2Hp+OCB*Ib;O7?yx=c=gpy&iBsyOtzc2rw?GJ$SP2pe{ z?$2HGH&D6;N(Z4Y!7V?A37=fdAZKV8&Z;Z8)eaax|sYP?ra+0eEM#B`esvR^XWRx zO#kpM;gNtVwC^y1V&w>Y6kGVr`VR>W3WKV5gR*wOWIAquoZ_5!$QNr>0 zQ;e4LXiw${td6H`anNhRg9x8WQ?@CfbHn`zeeDuoWIRj&qJ0o5Wa!8+ul$2eI9J&F znef553!#ZKhc5e&1QNCUJK0BQ!9@$U=7eUn&h8A{@x2xttU*{L9UMnS`HhT5j7id} z*G}&%%W-Qr!a<;b3e2JF#c%E`nDA_F|U!ur(-o zb3V$0A}=<)8#h7E8s1?E{PANc1J%Zl9t9ddhvSuLu9~n09}hJjxGaG?58>|Qe*kyy zMkMJ6;=_3xy)TN`eFPfP2F%%C8swz_Y?Z1Ti247K953qB;!j7ULS?;AQzjEFzhvoO*$@!tiL-T8+{QlT-zAB3Rh+^hoAHFe% zTHDWdf)ZTZ8>Vr^L->NR`M@$ZnHp{$Lem%VUwqRTn-*jQT3&A2SAx%@Fboyi!G^)$ z-`IoLVDkgX9w_c%4rma#Mjo)b0y}+Z$PA!ypuGWOi`CSJE^g$=gRhPXSICF%rB^h) zdG*pn_uQFyI#)fP{@<+U@NQ17pL=lq9)dL}sea9-`A^sHiT@+@Te(4y-a}&>LCg=W zaS`(Fq33@dbFKfk)l3W!P+nr+tT}l;(aSSWfnGE^W?yWIrp;d1z=ECXvTw7Z(ZNeN ziQEr)``fUuA_(LpcUAcGWxR`->m)Qt12#jKJA^w!pXSdAx-9 z!ae7V7w3~aDXNH58blSr{M43qFbq_OOTN$I8G?ZP9m(A$eIH*07N2^-puh)bd%rg% z;K0y;*wBEdhXync4OlWXz&|7)`I|?{web#tYm zCrMD0555kz`Uqm#Zm92g|6n5R|MpFs?VGL7B}dX6Jo@#ybOO+PJ&;XKnEmU>{cb+6 z27Klq!WVTFyomKxAoqm2Xb-NPC)B%ujZQqq zaJ_-o&i`|<;Vwrgd_oJ=PVch$t5*81J>5@9M0bqUDdffxt1UbO!+HMdxHI(Q68 z3P8bf_1>7eIQALZ@)>=)5g)q%9!PHNK_A`=2p&5|_NmpWF<=UeaWJ4Y2q-^H`be`A zDbyDZ0t&LI5JCaNGm}MTRLsyCBC*2WpkVcdH*`0?)=l?{gxsH~&u8i%fp(F%U0Ld1 z%JhKx!U3z@^xn#Ii!Iknd}%`qRXFCM{ z&|eZ~_7MHx3or)E7kh|+czM!wG}1RULo#lopCp_^dg7Mvyzp+EF-(toUa}4AGy2Y@ ztlo*XECkgq$CJS82M9(!bUTU&J4-4L2hPwQYGv$vUvg2r2dwx6JkIYy_S@lI$;qA0 zw;*AkoVM{j1pf8Iz3rH!E5r;q=#)O|fF&i)PsAYoKq=@Cts!g!L+ekA;~e;Rgt0w1 z@qNlrwm5IZPg#N!C%m8Qh@dDn&yU@n4G-K)J7&%7@s^5%7HY&n3juL##=Lr3@o@Wr=_yHeja1~d^hrUxNQ8(2%t>@q3Q2xN*c}MM2T+MsFFa=3Riu5z;a#4De7Dplb>1sL z3fzu`?)w#imm36Las*y3$0x!uKCqqK9~gw2YjMOy7cC({Ip!_B?7S@%R%UE>#91yK zWmthCGZQa<$atYW9l>3Mror;8Hi5Z(r;2kV$%~`Mqs2BxJBMRbi~xfQq2WLuuf<9C ziPJu{r1E9Jo8!G|O3arT*!MD_Td#q*#A@r6hD?n5mfppD%6VlbZpy2!=bd1(T1 z*2$$%-LbhP`FJzSMl#Myj}9IRZ94Tjg{A4`F6(?-EDu%dnzz68m(2K<>t&b1SD13|5l!2Yh{9K#S!NUZRI2xeLLtnBGuK2hv#Ee z)Vk4mxs)YQ<=||&jSM|qn|!uidH}~~B;`rgu|XNqxv3yiAIOxu#lrRw8g1+H8ooDP z7heyn3EV|*5X+zsU2RR1Q;es-3vSBi%sN_(k;I1?Ek+R+#)y+YK=bpGCz-+J|381e zBk8|5&ksVsNwX}z7{zDVM`@aPf@(YDEZoE_OQg-MaCF|pG_VAAN|3j8fD!iT@KcM5 z;CX_Q!#Z?VuT3*A{1_KiQy~bK$RE7M<^?_P}cKTpse>v zpbQ@brjO2xBd};jy$bWt6s-5bK$#c1B3fn-l`{lAxFm644OYo9QC((D!NZ8gI@YEV ztNNkxO5=eVvaC>R`!swSSRCOr>UpqUd?uF?-)J{xxcO_~)EpdUh7-zAMHx2Kxn8VV zhc3XG&HRW#AjtCd4xdgD!Ip*F3ib;NHR=GdHHYjGH-Mrb-c4b(Ue@4>99cVo4t6Fs z#y`z@m({2wB#Wk(A48@RD^wsYkEib!W@1-8&`c4(J|zmP+z(JHfh zMfi&p@$}X37mm=~`3JifZBEg0pOd5%xD{<1iDB4XZhfIfC|{si<1_(A#Ka!zo5zy}%nhk~pB5FG9UL+k%PB*dJ* 
z{=XTPfc#(1pI}&pn$8KHT>o5{(-P>M_Yc9L(4<4j;2I6IGn|ucrRCj6>wUv(_NLtx z<0WX|l$>jJL(;wvq8XlcUm|TY;)w_*;b^H}bDE*f-+0bccH3Dz~!a|>*6Rq(!i$%kN_0E4ES!P_yvHn})}mReM>yYb|3C^jqSRJ-KGlwDJW z6YPw_ZwS=T8D%5kZ%XTllpC9gg%K-(tf{ComWs|nyd$m{hs7w6)vp@g@Y~9xgN1R)=LLWBpt&`N1`Ns%?n5OMdcfD zm_?BR>nx63>1Suy@6DCDIOwI?h>9Q_d1Z!9IvaAUjRz?N$Ile!PV7bYbV4tVX>oSo zYw(CMLk13WUBa=Mn_Q{ZuZv3Y^H;ghh9Ap5f+x)ST=P@JaLKhBN0P|p(ENmk*^4g+ z_M`WL*%tm1x*JF@&J9hpo|-U^426QwgcDZTT9*?Gu}L2hp;-*a$db&2qyN+N2JFpV zsdN;%L9P-HvW}%{8|Ba_=^8ss6@T!C^+ISeBAj&ZZl3md9fGrI^e;>I@A~9y5k3@%!0V~8=-K-2!$v;(YaK5a3w+$Vy+dH@Ciujp26p% zUMJ-HtAzSGUZ}5Gf{0!Pss@!42bB%zm3E%gT?xa{Vv*l`k>4DVU)gkisOQzJ*h^=g zaJx1cD7et5S*0X|#<(c`iQ%RHzj691?75ZLc0MC#vscSJ*O9OB%`q*H-7l{{%g-jo zx-luQp-Z2Winx|^zN21k^A7J1eDjAN)?s0O;jp$FU#X^@_zfrhiIXI3Ei>HX#rz5{ zU+8(12@N6tMi)p^oQ#o+ub_O74?3I~BUqwOtbcmBgG3cguO%?7E>yH^(I2zlOx}TT z3h~l7VIK0P&w@mr%_cc85u0h&XHC@>@{GR&EP%{>d48j75%C2~rfTblh(Sa)O{mtH zv=mIfy>zg&7kf04g6UwmVcr5DU~jlf4tLY(pk7tK4_eNGYTNiqQUqpMwFQ0*(m6gn zGSm?Vx%rakkLV%?&J-%wFe?x`A`lCC6}~o9)Sv^$y%zE34JH8SfMbSL`=6)_ynp%k zTD|{}^m3ng^1yN?o^nP>Uci^Z zewM^4p=Da%698SUI0HwSzL08k!a>M>#rer;n#ACQ}}2iF^i#0wFWP4=Kh3;0)MF!p_{G;x@5uxMx+-oyADHEe<2g6U)& z*KWXQ7DB)T%(M16$id*yq=GuQEV-5J%lKX!NCH+KZ$Y~{yB(i5z$<22k_c&F{a@_8 zeSB2aweWwE3}l4C86{{GtfK^t5@{5)NYEzag`m77(igT)#ut)dwW}2y}#N%RL{k?MlIr4>0XZL1yzc50NT& zU@6{xs>TVRVF>!Bj+ze+veWYVx|;CA(l(n_$QgrgO-y-78et=Ja<@wGnfu=KmYK8` zFZ6PQxW1!>clgQz-j&bUQ8%<)a*7*Tz%@^-LiQWIu?lazZlfk#(zmA>Mn%QARt@f4 zgq2>v4iwU^h8gox7?eZm`WWs%WW#>E?a?+drBWKr*_0_BdC>il8)B4VEH!%_dqMPH zIOdA@X1~<{70iPQb1;Kzz2z)i0jIX|H>9Kvjb?yRD76dN!z= zUGqiP^YjxG5r35Om>dU}@=IyMPeSr?h3*CAA18#SOas$+%#-&5`8S8im!#1OZu)ux z>ji)LULtUTRK0np^H#ihXEl1Np_QaI?r^Mh$Oc(HdL|?@T3>_V1 zAnLH2ktG7xlX)JJ4bd}3?SztplWS&|e7M!E92l0ZzB$sP`SMRT*QBqiL05iE2YD(W z%t^e#|6*WP`Sf0a**cBzGJY!vHznQ(zfJt?S7$#T7exqgq@&bZIA()T0iQ97SdKvv zl={()>^zI-^16Cj>$BdJM(0=uwsw@?dLB8&*a)_zbfGE$&=vgSTOMeY~(&X;B@LZSH%pe@^7fgv`CK}lga)Llk#IVdV~M+=sAg|a_zD9`OL^GyJC zJhtj%lpIEE!!b_wmf9f8<_SRoT{WWd(}%x;HHMGMxo#l2MpwJ|ih~ujT3j~&VqnSJ zbi5|6G_x9c{QcrkTkg+}d`{B51|*gKOS3kS^Q}q3tB4lM!!Hy_DDZFTg1#A5yvlDl zG_Wue#CFKE-2AG~6Db9W=#cGvBIydXFY)1;xgro-frF}zS6Zu%{UnDk^peA-a5<7{ z@#JINR{GR z4yK(OyUr841MSkr+^3~*N*Gwgx|s1vtZc%^(LZKHevdK*wki`>3N&vZge|Y_gS%;k zR+EgH>*8}Vm294!OolLeC|9<)q4-|a{9p1Ua);4iy(ZVO#D9wAjDkX+T;Br2lmH zz4I-F+qXtxJ&JuiUeQIFky*oi_Hy0gaTZg-=iH|sryy?U{OJ??`pbX0CtJEzRcF1n zXQ7%L*2{Yq3WSaLclife!#HtCle(v(9>3$lm9w>E6D(oIzED5v{sGC){R8r&nP#dE zvfmY3r*1j}3~+?>)q1%uUQD8>czf?VXVeJkOP+2R+EbyLWIea1A}FntI}Jc|KFjSH z%6>`?vT3=HEv=8P^WyHi&gYUHr@qf{REsp3g#R;H)+k<(r&C zDA=^#``JrP&W}SB@qImhc+$hW@5=~m$xtV%J>0{wEaM)12?u4*JZ_hJMng?q{9hQW zWPzOegM6OD9+K^o6G}a8=9{?EtjrpUl@z#ZsQn+V8p^nu@GWj%Z&UsuWQoan;O;y0 z)h6wm-`Y4;=NuOupE>MredJ+=uND8^C7JXhx~tGSak3UJuZvGjYaun279Cjs4Rn^3o-zm7lU9Clw zEbD;2{`Vy&>v5}*U0&P{FjF(KF$NLUWq?~(;^#S;W`lLw2P*4%owZAmWPx;*7v9xp zb;>oHfdS7~gP<5A?9IrQh~aTrKO-j|iU;O=l`n>%NXF-e$BT{^Z;=fFChs&P#Ul&y z{jjJDa)vSPhVs5aT8IP&CeHF@PEc+wSJ8(d?u46$m&xdJ+0ow}Q<6`Nv-O4&dN{Ae zHI_B%D$c;hTj*0RjB)g5L8fsoZ3==M)F$+FdBCprmYm_!xxDb@c%!b?iaWJNin(wB ztehMxc#!It070sZ&3P~@BK!#0ure>O{xN(_K`j{jB`r-4%ZZf$2^?Tu0UWK*=e!Bb1vUVL{hgNNxniE%VUwJGe0Ex^Q(maBn z8-%_9jy!rXNE`>b&GLl~!S>U@q`+RpI*pc;oGK6#1!iKMfv+jFx%SZM7pIh1 zaPIH-vWl=xcrWA(1K(oyx4h4i#ZWlmZ@tg|gYG#YS^oe zoE^>bS-YIpIye623=J&s(O`yKbVRv(JWtU?!>SC%CbVMCv0YqTw6d_b#ukicaVMX( zOAS;1PLQFd@BA7pG3dGSKbpb=%y0B)R#%064CL^;FNvPd3yEKDU#RgO_?XI&n|>Hj z){DVmPxW|?9 zZ*f)P%5AVzJm%V}tCp=a;)%RX^wQB@DT{~#mRzAy+a^b{qI)y8mB~-Bm6ONh_F32E zTZ1!Xm*g7x7nvZ{Qi3sjF7cZrUjF$vzXIng-(=YD{&nK6yAn0A>++TF6$&q|lS}JX 
z=9OV=0eV#t3JHs;qS*Gb8;6eMLFM!fjJ0CS>Ep;@bTEe@4uWATFmWjJ-+MXw6MB_L zWd%X7rQsQRYFm0JKrcsJc=(>O8bai{n2(Ek3%D2}Rf*$g!{%$3RF z?|(;lq4jwMhJWBy9x0@(vgoimQN(_nm|@y5(A_O)U2dwq-^x;vS0&`*%go23wH7a^Um%QpjAuwaU(6`TxBC zo4Y8cvS?_-X-u&4ApllMGl~sUut9Dp;G&7UM0%LZBTIol_H`o3lZ&1^i2sz{JmaqH zXPILOW!p61<9514BJ1S=k9s(|e_;N*)AueL@Vq1ONB2)ZR`jwtkpgu8HVGv1jP=)t zy#wG6ESqh}BQV>|#m9RdjQ%D&?9(3$vvvvRICw0pD*o>f7N{&h-Xg3VCFTvx|CzMX z)!&tQ(pxdutUv|^z&r(aKWzGu1g;%$Q=AjR%>gLfA>GRG-o2VOa2}8XZm+k z1ge);iKgd`te;sgiZ_$^)Ow0ooHw7VGWJW|i}U8tlvsw0_)lc`ZqFU46xtD_mI;uO z+~G$_6m+`12?W2IX3ixNjj?p`mW1ltw0h}WO}uYpvCR}~mnqOkU&s=WH86h<>zK^X zIrPdncmso@0O}vO9sw0#i=*v8DPKMl-RFREwmR)~iNs-AXaS4QMYJBRh| z%mV|SiA6u#IQ7UbZn--$gk!l2C^(CQHM8GxilS>-$A@oJT|^$0dy_tjOy;$!XmI_+ zfoZD3nFofafmSw7Ejzk@_FL0AKbiFx(09(@ds*CtLI!(q=VYqFXwKlLSu?wWSP_`x zXAaLiXvvQ0T)z=0c_G;hzVVOyVj@~}^p!hZ3DD{2D+6;f)viPcINLXRtw%JJ)d(A zZ|#t)6_ROnW2`brPT44&6DtuGz#9vT`qI1V#=CD)H)^?$OUTJ7EqiQ{g_99UZdU6< zReEVw|JA9EtGTY*oY((q-)C*~V*e1?*$HFzM&y{GyB8^NAwSs5{Yys%Wi5U_a z#?Kd9a>&Z{C;I({u=fz#I3g$A4& z_NGD@-XoAHMmW820Ak8+)52JWn#p%s$tJUrSY-P2JBg= zw)6HZbSX9JWWd-U-Lp_}iFdyUb7JgSsEw#&jroou<)L<2m;{-RLr?01I>vLEzz8uDGC(1 zU`!ZD-&Rt@UxZJhj@35|fe~DxdZ-|E-ZP)ALlSy|WI!nf&k%^@>zRsA>-3r0c%#q? z&O#n46CtAjDZIxQ-3Y69nKE})<{JfeuB7<}Lb;vo$9A^5&56GC!b^v%NL&@aRpMW! zMDN5uV)NoLPp8m~i11;7pb_L+N4hMY>8_}ij=Hfrc~0&=c%0q6m6ApSc*+2rLt|Q=d0Ra->WNT zsnd#X@hXvt8w_7w90|jj->BBM?TKIQMCPNTp;fn8GZbLhg1=j2-*2=l!JcVh+ z`Ai2xY^coaJ-ifSdkOZ*HbJ&x<-KT+#mEFNxoL&fy>1k9+)0s67tOfHT-pAWu`}sZ5^4HPMc(@|4XJQcV9Vbi&Gdx{_`R z!Q4`t=eH(DF50v{&u^Gh^}SSf&t#jR>eADKoFpDv67%w>V;p4_2n>-v6B34<4iBos^_kp;RzfnI9;AM zcqeh($>8fiy|mB1zxX34ORmD(X|dZ1;t^T{(kf$Fc8+>H_H8&}L0s1=tQA{PTq$ZPT<9H*U>5AP;6x{orHc0UFX1(bT8WHHLzeWaqK@Xv1w}# z=7NewhWCCcBLJb)WI{;mg+M4U%szlagr9Jykm;=qOua0JU|OK2Ve`%%Z_C7 zF~;(ugx@E(Ys_Ijp%qwqhl_^= z@E?;%FjRjX#Asq}bC%N|Sl6cK1G;Jeo_EaZB{b9`CfUk=Bu*Gj7t!hIrn! z(hG0;U}*ewh5~zRwa&JW?f~a+J$d-35*?oVMDr#|nj{bEMeS3ziB!!U8P3QYx4-N0EpW_KJT^K@71x61*4RwNvir zA-T)xbriEU!jHy}s?p%p_yGzC;}jQpAUS2Uza+8hkUbza4VS@($T0Z(-<5QC1r7h! 
zf8~h<;S?UsidZS^@iRXQqdO8H{(iZEfD^4XQ1f zI)rERAqTv$Na$AI=~D51w%P3P7yVC_X|EwF7mlb5b#NI6bZhTEAq_Hf?>?byvIBRY zkcODvyH7|Xuz<49t&+9`*UdPqV_m?mi3)hiarf@!_)9=iRG-7tfK@-o!${2OunZgA zAoeC!rgxaG>RS##p&(L_4q4xIqDbd$_0Jmj@WPD1H;?QtyTgSETciT+tCo=iNz6jK zKaq3EC(=(tkjBOzn$LIK%h^XOQw>VR3V<0`r8P1D@(7(A+az{&NvNg2_cxEAQ+5jY zbJ>jl$U4{nD*P3|&enjH$!A`SQu#sh9XMK!rk_Jv{c-v?@mFm3@vD?)2NhGSu`6J% zQWD<2_}dHBXqQDd=CEY~boXjbLVR++chmfvBIDSq!+-tK;6S zk-)(S^F(9tl=}F;Tq(2zp|bZ)|C2k_9I{(z?K7vuKSSFY?N|wU1WZ!O>{sJo6QI+X z>CZ)vVZ^QWOPuo}{pY&V?tIWXK+5u9bSJk=DrX5YOO7def?B7*p$L%)`tX(1kSJ$T z+(1m!s{bKqFYpr$yh}t z7rL5f9ae1v(zPq%CCtY1#tWO59hk1dn^;}IC zCu_Rs({yoMed5*O@m5JO^v&kt%86bKyyT1F|L!ZO#TYI^2{1{pG`CFoTz4x{;IWpE^|KNx3rBSheDDpv0_`o zQiC~`8$7+Ph976V;-V=N66Ua^lqcc`OY5%a&xJk)SYcW5{QYWCUFYL8I7h)x7oAY} z{eDRL_he+3lE!0ZX1#S}aHqLgZSPM86wj_#7awo=s-&ST(ITK~GcUC*4KKu?Do1oW z)m7FT7=zwBQ!Vu%%$7Bhg)c%Di+Oxacseav!>-0_<#Sw+i-RUt@p217<$Z7-;I}LT zZu~ayQm!d%oQ`$mV5JsY#nsKegAdUCBZEPI|C@qEjncJym66wOt{YOPShffOW3^}V z+xn&d2k%hdyh>wicy7tSYt5))Q#X?c zapH^okNB^algc0=#0%TbqKpB=tf0lc7sn_&zvPPf_s)&gUZ(G$HS0E^G zyGfmSCoRXnPHdqUGG}XHA$6Ki4b_Fipbf^D{WlcOTV;Un0h({~3M?5nr~>z}a~W zg`(lQ?4>|f+GEt4ve!^xcTr?R<a0VqH#^Ei5#~J4j)A653?QNbd`r|BXY=M4O_i zO+k8+fzsRt20%@h5#t9=nHMF-GjMW<2#-2mdjP8#!T8oXg?BQNH=+s`4S>zpJ3s8$ zdTcx}E48h>qZ3Ru6@5IxGvI zlE?QlT);VwSaTak8@!k^#7gjB!hf&4R^_o-m&FY(7O};+B-xcE-+b2K2zf=j-^^`o#ujCbaM3&=%{>4s~6?p@bltUM&W#qFmF*I_l_~Hmq4uqZ&esIYrvwt4{6_z9V__SzUuLf&m!%Lux zkfZaPcK+n1pF96IocyI;nW?>j*oKc2Q^N2M>S%0i70Q_2tz6b@t@C2{Qoc9(hfFNd zW0BWm8{(M2Sd}@P$z~}1MKF`HyZrRy)Hi%u(XTdK@bv3E*3>7v;qEoX1o46;{p_xS!Vy#yOgFovBIJL`Ay%n=L700VpSq49pfzoOm|0 zVw-U0(*?9XKJ_2~-WyZJTZ6=3XI&|XWi5%*>(7kBJC2tFdakdQBU%=R*B6kd_RUf_imT|V<*0h<3Wcx z9u(abj4gTpk_{}uFU9IUw&nrgtWT|+Gh%gb$L9ZCQTgi|Jl53VOzRxkuQIP~z!3CF z#+`*CR{kD+|Ga;Th>`K8DhmEM5qVE;5f5KQosqX=pR1{l=g#0m1Z8RN@Wkac4e@Q~ ztK7m>Tuer*d_TiqY^{0;J9cZy`_Ul}_qowpS|e>*@_teI0sq!!3Iy0z@3gE>6H}g8 zz$8&euV{pQDC|7VZ_x4lTYQPv9QN_@|HK)sy{vn&3=lpMIc6<6%$Xiv{NQ3mCz7Yq z$`5F*ms34kMaeU1X%%05ii)lB17gx!{x;j(fGja(B^|Ps2%WT0lxLe4R5!%^uwM#r zUc4lrwh}y4cy_SkFCtVStD5Iq=gGbn$(#gPu~mn3s!CE-H*705e;ltpjuqAaV!f)v zDg_aOwM2vnHBtQia?}oc;DoaL z{bH7bu7^vSo|lyD+jC+t=2bE@euDyBMI0YQ#Vlhk=2A9WAcc*O)W{FssvxVEAwh7l z*Kn!IiNpB~TA2fNPbqWSNO(U9S-hu-VN~RYj%vpIE(No}cjJ(?+H2j#LH>>Nt(W~j zc+ncvhxpIaoM9m|-NkM=nHm*qzxnyq{-t|YS%WC!Wn(%oew3|h(4AzX*4Iab4ABI? 
zrXKISpGf?pL^%tJ?X|MO>hbQl@9vC1a$S+#Jcc6IY*%ZFa5}-;&_5nam`k`Sz|7up znB>Dt>(U({aL9*VLy0C=x zfi+3#EiZQ+Wic<)$5>!2ZCf8E`Zc8cUYB9Zzhcnx52R zDxJoRjp0_E&ZE;ayL6}h(&@=NR?x_U50W6MC6Ti-vcM|GDElFLNRYQ%clxYu$P_o1 z1X_VvWOAb0+YtZN#R3e+vlbsdRqDC3Vb5)i3aY*1h|)8k>-`K|!GnmjM{y$ahV^n) z{PIg}NNU~Aaxc_&c_4lZ(RSt?b7>ui$f7 zPJtT!?IYONLiYgivVuWmKv^Er%CAtWr_p1im>f}V+dSNkTdH`f`;VlTTYV{Bdj@-u~7|xrRCnfbx;VpIs8wqs=hw-(4rN5%J#$V|f zy1CBG)CsW25ODaC+Sr3|qnr`x+F0`c79e0sy*v2PD|= z|KKl5F_d*D<10KgAYcw&rZ)ywu`#gPb^&vv0*mJTR;Gc(rwzzm-{+)%Uo}c(Q1Fub zdtL|XkuwCn0G~H5D+DdjZfvZ54e>ug{ZR#SvhX(X`jk3Xc%Dd`ObOc=jyHW&_2sHn zTut$px9~?%-b5IgPW_d#BVZ`!O5|(=KDoaiWG15ffIaA^zrcDHj@` z${nk`UUzvjPf?!iC=|2CBBJCpic?QjL7%$1R)n%oH4(1`#%gX|NlyY-SBvB4+(^E)h_j^#OiOc$ zUW}aU|G}Fzs38#g#m_E4O%hMw7a+UGyd!!dBNEX%L$7qm8+|S?Jky#~#W-Vn<42p8 za^cK=N(Ew|_A5Ewy3!|!hRh4J*O;2{19$1EYR^1YT$^$Mf%wr1HL%GxK>y~V+EVP8 zY<8hu*hiK(ZDox@Sg6mSR1Du^{MFpg3!#{S&68b#!f^`@KAB-yn)M3GtBRH^D2ZYc~ ziG+Cu9vdOBJARo+$kDr<=C3)IwJ~r@t|-1%fIleWvQ|MDG{ZkygY0XZedI>j=w~LN zO17Q@9yub+$5#7pbQEt(InylPowmuZY&Tq~JSDN^a3ZnJi}M()7(CHGCn7Ki_KdsO zT4*0mj!X$Zu@ODcG{EH|-moLe9ATZ0{L)44?TMgBvGbovKeQ?>@~`37$v^xoBB!lf z%MO1mF^?7fYGZad2Vf3A0ziTeH`bY>y!oeYk2{dN@sWEn&XZ5PsIXB7{Vq`cewuZaX@~PS6aDJ+2*5eOT6kn`B z(%jnmH;Z&HY7Om%brsmEcjnB(yN|2QuzXQ|`8PjAI#umsfG_N&+XBO+H6Jt#1Ijy8 z3ITBV{SBj292pFntr+>8tC|*`YZsD~{wc%(RJ{h&iS-xT#G|?)RrZRs+QrU*jF2wT z#_VZC9mD2BQb(~hEiu^Us8E0`0*om8f*i#t&O=8MM5p7*x;woO&<-~O0$PlbK`*r} zdUKVi_Pn0~njjI|qPRgLgAjY0XnsyF*M)kyzCC)3utPGH6Dbloc~LE_<}9hV@Ku2xuO@e-$?GABXzlQGHDGaoV`z1 zg0GI5z{xLHij7r3(JQ)qu5vr|=aXc&n|G4zx@?APtg?yQd`$Y9G5f7VCYmNOF0I8< z#(V~z=u|6WrC2whbN^^6uoEQ+!q7(cqx2|Tr}3dwkFmsQH;ffX1_3sbYd zH@*9_cgkno%Cz}{%Q)uq7xJy485v%53G&as`Rfcj=Y_cGP_D!~oBKr{wDzKow*4X4 z0E!;V_DMc@Iny&?RB_?_WWVV&grU_xSvJ!^yHRP63}u;4(j+e?Vq^yLnKNo>$aZPPBeL6eI z&j`s9ZN5O%b`L6#a*RfE__ZTpZzr~qMCdQ0$1zq5uX%od|NTH4byAf;e363qc>j|b z4-S7u06gCR6pxbyUpTWj>;*-*2(Q{>WlY>^SnlArEdaJ)oQi6!PSyZJGW&S(5A$e~*bf{4VDXOMKkgAZ57_T=V(tJc}O`3`5OY zHrTLBUJN-G_d6aq|LoaEMByMl9Yyf1Vxj8qNZT+f2Y@RNaW2bCmGgTv zT3oEKw_Yrkd=u;AQ7z!0&F>2QBtU8KU+T_O5pZcr(7-$7b$MJ}Y_FYM?hRwcPKJk7 za(LYKT=G<@Gg9H8jFH`j!0MrTW&cgvSy^D;;aoow@mg8O*uLE_M1&RrxtL~US>0~m z?%4<*tfI3>p~XjjWK|xXh6p*ux-C!n#3hT0_5a{GtTRe*euStTe%}QQ)^Fe}gwkE{ z`4ACD%)zAyrKnfg6lNV~-A-SgWj_s$+`b+_4T{ETy`2VL>k%pSD$ZiWXpFrUr|Hxb z{n&Z;p(j9F=io$t-s3_t6hO6N329nj0eXLzFW+Nu4D$?ifE@jf*gG72LB$Jh7NaMV z8KKCbP2~{QiZ!KEWG;L^aKgXY7F3tqNfg90B!3#*PDAqXPbPP3|V_Y|Ww=3r29uKXP2)P*Dv`yH|7_=o}R z1lhb|X&jJ$Vy`6byYxLSL7s%W5zk+0vvV4#Tdjd4&MDq_x(h(Y|M>|qSse`}K#_7~ zHRWT4l_GeLc4VmyS&|gk=HIPioBUp+tXZsT=YAe`ZRHT%QV7^`2eokupb6>>p+S}O zTy>e2D{&jIFm}snsY)xv)TcJA%Jh6mxfK{$e=bCbL9C@i9P&d2qQ=%Nvum8HsGbb- z6(dqyXcO&otrAW&3#nGR(o`7{5Mk&gb^Z~AdsvW{+b@{APS?-kh71}0mG1TrFRj;o zz0|_?W~mI%Xpe2_ZYrnh)g#ARhqR%ty5UFfRpSrvE-38;8sMPZvN2F@A=iSWP%e4a zQU=Y&|0E61K_xT_X9c_elX&e+5mo;Gr~Dtv4*JVxw%S2A1Im|4WEsTpGIrPT$63J* zn~BIdO8Z%iNvKkQWV8C7A#SAPYsUJ5;pwSEZ?E48O({%@$x0sFjuwxz)QQb|&GV{M z(tHLqTqeXOnKYo1CQ)5Iu-;ipN@dZ^VS&V-FI!}9S_joSH^;iui#b#QN8GUd?-Cq& z0TyQ(rym#R-YAKeo@7F2#F zZN@c+EK&~Hj}%B#L z{9k%Uf6wO<^c|HrCFnx(^gp^cdYrcHpS>%3yx7{uQ*>t{`f4KnD8~&H2=+;t^hJF3 z1mpJ;O_h1*gWt3kDdWl0{7;q}4_b$ZE5vs4H2+ijZEp0qCz5SN3>eOW=y5L^P@Lz~ zd4|t-v6(I&U3@(jAYtWGUtl^%5RUGrqG{82O~|fmh(Ocmd3T{FOEspW%$RWP531`({^6G1lu>;lnTWdxXMYGpqm+IhB*3d?EBgH zV#_$6nR^yOb3GpO;^GuU>F|}jqNcXL|JQ;~8?aNfP1DUQY~|Obw({#j`|7}Rx98l! 
z3EO(irC*LWBO3xMTbkj(3^z?2-4$SbB3`kd_yYd!iuRcctVQ_~_li9#wqIBB%fvTV zd{2I!_5#oCb1BkaHprdvk325S2ebb@HHPq5{>_gwJw=(wmG0Vcyzp(?^HR-tJKNoi z?axqT6)mKJyVAaJDzV905{MsSGZB;H6V{8ch?b=3#Oe6gV>p5=GmazV~` zKd(gxBCCo{L~bAv0zGNoJw1uJ3ZR^k>!K3H*v7vi1&m$On?Om?Pqy06+niXJ-TfH1 z@!bAh^3wMBc0lXEHQAmX>e8)K?RkRNgSTR0A;%@<>>qhqx8R?vS^iF;;kv?+7)W}N zD6$-LekPGN$qxW<*zDAfE&}%^h9u2qKg}#3c~2{f{hJdsMRDrdRuu)u4{5bOe?Jh5 z`|*3Ed2vQ@o5#FSPQ8Mgkzl-*(Gva%t26%=*>I*2TT(+Fe_}kUh^K_Z+%C7aw-z0C z4_!=WXpI%7ir;Cj0w$*hM&8x8d2tO{0rlvGpS#T`ljPqoQU3W;w8noRiuq zeV{L`M9LFQIP_*n5mq*1iq$ET@&3(XR?NT~L4W_Z1Pa&WL+)Ttl`ruePKhh7Rv6Y` zRlSte2UT%DY^U6@urK`^|M)PoLPh(x$jN|Y$>}9VwmalI zXUI7Ur(ja0DYUBxSU46K19gQ@e?lj>zs^5@`JlD0Dt-nuK~WUlQ;d*|Om?)E`i8Rx zICioc`!*{gE}C-LeY(#+zYH;*bfh9D!z{qj4*KRl)hH;c)ArlapeWpg14W_i)*FdA zPYczKdsAKfifDaq&s|vbXaB&ZV!%Xg!~fu7L5M&}#wg@iHwbxkjfA*Qv1+hV^F%H@ zvMb9RFW0)5=WX9YGX|<5n)~9jkSa9M98!0_Y?P7nM$X=Tpf{OjLMn}g?~sj%M!hp| zvNK(Iatt3PWpek%G3QViH&k@Y%vi`ap}yybOWA)xm4BuNN_ZOyk%MmW3CcoTZ#W9a zZ3L*jK(|l^VXK0Lwzb_CUn){GsB2YY@7X)KXxGLww4$Pt9O|S<(JnJXJQbsUsfxdg z{7LuJ%o!q&8bUEJAoE2jKzP><5;Afg>@9`82#Flbti7GNmle9>wiljBTZ`2-@m$X;->lDjCJ1P zqFw9%10g~?2v~17z&U-O6(@TA!~>A_@y7_Oy%B%EXcolV^@a@tP_lpHP0qpgR%2p? zXjm(|NA;+AMAKvK#e#{Z`?6l>on{Tr-1Xp5_DG6`BK{)(#>|*MNm(>oQ9G(34)J$9 z3P;XNW)bJXk!NK|Ts*S0iD%K%A)*e!iL6m5YjWhnTdWjqSb}-uGbS4buc|{y z&kVM5U5D@aO&OK_qY0AQ6<)!Q-v}%gy)YcbamrULwH+Sw31fRWIchmE$?~k z%Q?)m#Ipm_GVe^8mXU06kwZsMy@i6hS&RzA_eWnaQ%n$it6ME)-k_Dt@!Q?K_NgUO zON7i`^n!NvUL4Eyc{3v8{7;qyhEBmGoP|CbXJO)(0`)oEKdRp+UG+VjULR0)*ie)u zTrZRy@CAs@Nq!qzeFbF5v3{!3=)WngtkHY;dIM2lDS!5Qs@Bo~Mr(5T1v}RatCR&$ zonU2;!s~xFLj;*J(V2VwJJiePL~EXHsb|?+;rVh@=ZG<5ZZ+gW)TTk_bNxFuoH_fg z*}orHnD9S3Br;v;@a%UG;TSo@w_$`Z{Pe_|A$D0qlmB)0^RtimA3YvTknY3T`=W_r zez~bG`YU7l;Pf|UieEihbaSTutLSlI^~@=PWv;CWz5a=3M;I$x#Bo+G*@Dp<0wt*p zfvcoznGG!MpV$C5J@)2z0MMBQ?Km*ofP5bf^hShu5(2)5KyOWbV;9=e*A0C2`g;g=o}cHi}=B&xhmJg z-K0nfTy_)jiVfN847y^%<*VzCKY2~D^)OvfBHPm$gAIkM*8yytEG32iynXeuIlIsu;nB6eE+0A zL{E4l<8_+wC+>|r$s=YBBlqs$gdFlK-=#R&P4?umne54x&* zg3PW1u`Mj`&hcNr;>tyo64l$VZFTt`bqP1@Frzs9dNDD|oXy&!HZSZWuzS<Ql`2FrI$m6gdt0(T){T6;z_o>2znPgE~ ze497&1G z2dv|2Bgy(Gu}(?fBAZ`;^I@NP1s{AWvHq7z5P3ITC;)Iysw>ezrYtiLt5>uHDQdv@ zJIC9coxV(*%3l-sT}Hs*lk* zky>rN%1)~oN?b0M_A51oM&+swmpYc8lH&Kh{^*{Z=;w{d*I*UIebT6VX^3~ra6(js zPYSZ4okw3}GtTBnI{3N-xe;I*~u76~IQuS{;NeK=- zRQ+<~t;>5$CQvO?Q026JB@f|;Pm){syLRN6a?V`(`{ksX;?Mk>zsaN0botSA9mBa2 z%G@pgOynqh%({_{c=S3ApwE)l`|3H{@$>j|pWt9Sd8Z&d+9G3L)sR9i*al_2C0gqS zr4*+lC`Q>g>^)s;f_@AGF+|~GU>M$gR%wBAmF;}V?&kv|`{|fTB`37Ni|za%=eT4W zs&^u=dPdVzU1EBH_l+#jO&g?88Gl5(Li03aEG|hZaX`Lh?3_}+*v&X551tVP%6F?6 zQwx(jHJLO|mr>64`TRr4?#jinhQRhJqTO*p;#Q=`=|gLQ?Kh=Ib4D$&J)9obzq2o} zy(>LV?Y%zTPvrJq{-w7;`mgXyojXk$z^3)crnlT@ZRXmSt&E~V+{B|lquU4VMiQws z`vF@X7Xywl><@(gj*QM6ogZ5Nupaa1#@zIs+6u93V0e^IxIN?1wsE}peIcexaX;?W zw>FJxBFdaGH-B``MsHY)h#)hTlU9g`_PWE`R`1$&w&i~tU%~}|-?4FWS#Zhvr2JLL z0SC5C1GqX+zG8E56)XM{wSPbB&6yt#Trd9qvPzXd!e0(lWMmC#4_`?1K*QN$yQ^+b zxp>7e_LQsf*i){o3-**NMYMe>PIhbqM#BXI8w#`7*^8AIfVbQsC%2zXYlw%=LXbf( zEl21J)IHHP5+e@M?Tex-qFk)>n`ACL77b#_O-& z+!MvN0IM~@($pWiBo1PpK+`Pe^Y>~j!0}XV9v3&EYV@~S;|FffY>j!Al@z_O{=;L(W98pG9(>e%0;POgN{Q19ynSqWno`1E(61(uOy2)(B5APL z8`)-p{{BaaV}Eu(XJYKkeN&VVLE(%)(V#x~cO+^c)kBhsl|gKs&f)!S-bW<%4zELzjq?{Sq0CRwo~_2FFAfhpTmGaw&0d4#=r zPffZs&mlwMJd)BkA0^U0ZCjS+WW!21y$Bxyj-WX_mYp8@F@fdHlb9h*7=eB-MX%}o z1l}z&EBu~hoU&ho1=+(RTDb80#6r?#h^YG?B{2oQpX7s(f5V$TrmIQ>E+DtVyIsiq z3gB)rw$T?`%2j2|5XGeG8LK9PNDQf|pq|L2OzlgcJynGq4y=v-q@N)&%NvBX8+6OMW|h12y!`M7lMRT=cts! 
z582=C6CNUH$afbET>aQeV+4{$;Psd?9O> zh(}3{id3id$$@mZh=?H{lW-|chl?C1k?+7IdT50lp}q;5iu)N3oTF~x2V+8p-;~+9 zlYLtir!L9mz3()&YZKkQg3TZ9fx2=o8Dwk&+73gO>T@(@QRg^8Lif=lHDhwYqf6d@hE?7o!I~-Dg;tMdKp@xyA1}4-q|oj2{2QoO4g+yz!BXqsRYZUUJWZ zjCmE{U-bB==CpejWX`L|j7-AZ!1Uj>onioIh?Ix z9^7f|wXP8nLSSHBS;joy{5&pPuTdJlH;64(9x&J!Nn|_F$f2(D2p17POPEV2A-K?8 z`LnbtHNRDSGJeZ7K{P+=@Kr98KyA*ajrAWY(#Y&vvhCuwukb?gElql}AHqs`125>Dd7y}Xu^?xn0;jINgL zDcZ}Z%p5ENp3cSZ?{%e74@Jms7!vb#X+_pxRs10g9w<}hkF5j)on$xkI;!v#Rr@xS zP=QabWI9?Q>L^ZpM0?qxnPe^U-cuY{y~Q&D(hYgj^YRJ1Mi)IA(%Y$9)7v6zff_cc zPZ$%$_xda4;*9`AK&Zh<=zxp)paexcp6?CHI2KjY~ufG3j(tU6$F@0vS$fp|Z8L)Cl?VlX- zx%?5a!S<##SW^`fcg#W6qDZs#;y^XrdYNI|lkyinK7Q8(#_1Cnq_gD0&p1R+J0)5~ z?V>NAedNCSn(#^nts$PJS_Hw4q{^KLeY^Y9O$^lfU%2z1X6M)K6`#<_KA`buRB%s? znlDime(!lWtoec!tm=;1OP+C&qo$;sJXhZHERVJ;jo$%w5y=7d$JC7Eespf~^5oUu zB6l6g`Jb>XL0V|o9DSBrg zNsEHN|37dCc@nt5Th4P#*(Qx(!z{qBu1~JCOFFd~b*7Erijk)Cv|%DQ#LE$Smgrsy?YaGQz94Akbd_8;X%>^f>90FxnO~i#Vjnx|5T|VtV zm0vZ#dP$tzl7LSs=3V)U4kn5h7mb^X3vBa@RH3#SShN6QP5K@+0PQzyxHDOt)6 zZIoy$`u4vv3TYjbvsB2+4beMtCdqERc#axx-Ql&CYo7qGn&%xE%reI&&Z}fwV$Zto=)3U#ODAz?Fts4RZ<#Fr-%8yYMmrXADc);^7kT4_OHICKz z&YVdDOFh=>vAV+;QmFb#*&{dK1krc6Di_aRR6YSdkIK1d`L~vM_u_OyP9ky`I$4w0 z?|-Tki(xQ2gX1fdV+@4ePLkQ*{7REq*&UXzzfAkOJNY$BeSIhG>vxi0#R%h=kG9qw zvoZmbyM|46Dtocy2mOT4|5W|>=!r?|=KG)g;XhEaojt|nfrq$FYX0$@pWrzh)Iup@!!?=vEg z(Fifw&s}Y0%zU$PRndyTjT=u7Z$AnwoNbdEhVDC$BF74s6E8u&X?qqBkr9?7m#4{K z-sOL)A>e;%-0MeRu$N}E#>U4k=QU3Gt?Ye1`~eSJN;s@J?vRceA05ozUU)eU@CKuC zHca_)*iIZB9M>8KzDaenJLj;FtC&X1Ax}r(!+;Du-h~FYs0xwzPE14PeYF$A?HCn`h^22y- zstWI9eB)0s^3mP7YRT6e!`W}!sJu7fip{WNT_~UqORe)Nno!Aniq7SR8*Z_|&lYEh#D& zP29^_*1?RJzmXI60tUg*H1ZBR zc@uThtR+6{H0w0Zc3@$-B#`y%nIy}JZpfu?a^R!%odd(4j~*9muO+#$jN#c5DbK$= zXBeqAi?j7ha^E>;c|`#XVCt9S`=5_y5zb*WMel_RrqA>DTAK_8Yu@${xP!`#1*H3@@3nT%Fb;DA;NFkACt8Cy+z<8eM9WJC|P%;RE9L5#;ZSyl*A^Jz+cHO~MI+r!m9mlIsTeyNPG{E97@K;YarSy*KIj z6TII`*hhGia0l`2gj&K}!Zbphc=>yP=fylfKzNwvdHmi&s3*u@KfgaA^z%*r?x@Q! z?jd}e@C@N~!UqITJuo2T5y}X)gbu<+!dD32C+sBbBm9Q&5n)0D=?U`)D+sNG+X-JG ze2efS!al;U2*(H$R?v3BrG$Be#e_A4Uc$YEhX_9+>?OQU_=qs41~_xo>ZOGFgvErL z2zL;^LfA%lhH!xJF(L2;+C;dDP(f%R+(Ed9@DL$PU!Uc7AK_Po6NCvjW*DC#%pxox z)Dyyl^@RHf-yu9lc%5*F;A8BAghE0wp_Z_Q&`r39a1m`jonR0SEoTe~y9wI}_Y=NI z2otU&Tt&Eua5~}BYQ~lD2H_dPcL?_rHW6A0RfKB^mlARa$Eq@n_Xzt5&l0{*_@9JL zgbu=T`nr_g1^mt;fQNM!-u55T+3d2y+N!gi68+!cBxO!g|7ega-%@6CNWxL3oC+oA46h0O38tA;QN5 zeG^4>Dy z-Q?XVU%qc~eV6$AT;JvWfwXt%3hz#RQqN<&J8jFPmruCfCH@)LyX4#L$}itva=pv< z1H3!!m+$Yn-sSrt-ktW#_m5rg^4*A~+9Tfsu6Ow!NWG$cBr()tRcv9v)Wpk%%;36C602`+@4if*xY(E z9dX`Em)GX4C@qmR{poSS&>36O;ufiNThrrIy8F`Ns#LoB)8kaS2h!qdRl0}L<5aqb ziF*tLnbj?QKrIMt1z+@gUQe%%RxlUX+wphYaYxGL){$ke5R2;|84P!oW zuJM^8ak5tJ;=r_>_bQ1qx>#qo5NG%Gy<-{1O1yQ>TGP{`z6Uc6LBLtvJ>AWDg_ld* zO~gHric@Wn_>q5{hf}z{-~M&pr=F?TO6UFc5#Rs*6yJL%8-cG8q%J4_0ayP1v-wVN z+VYR~{8<3pGKh4-T;AssiY1(fnLfY#Z*FaEYh2URd~2wqWma?7?5i$Y*VZxnidkKK zEk;$}qK-DRF)fWTe~IMNf7{@Xu%}V5cj8R=V}rT!5aZ$eeFmNt=bgrTuDI{t#QH+q zURT^p#BC+c=c?lnaTqxmJ6-8~@ZSBz6}ZyPBJMD82VCi@h{N>3X-3-b|CJs7uk7%D zWrzQNWQVV1RrS?%v^96Ogc=uhn4#{Ditf&}HD-5P2g}UoAv~oOk&b4wt+S)Cwy!H> z7>6^f`f9`7p{AA^v&jrKE)1E*Zqk)^cQ@9A%w=nC2{oI>6k-;1cD6T4af>@C?C)JQ zk*B8Zj*zizTw_&Vgvww9u2%&W;|lslzm0u-n$q)ZJ0ey7nznN`o3pdTM&8uF*7%Y~sp$ zn;WkWNjEyX;s2WcT_`QY>2A)!A(}>K|R1@qN4WrJdbto7yRkVnSU4F0@6i z9+&jkP&RH$wY?_P-ns~w8;{zhENgC#ba#hXbRTnCsfNR8XI(1q!ca%3yR8{WmY8O@ z@jEHu_NMm6nx@-BdU%YlNf~WDrVNYfi;-*hb9to0Y+D<$`)a02C8ZSSjQ{uCtx%&_ zn@YQ|w6U(EwwndZXiCLaHuZ#!skCN6q_s8F-MFZSkuckCj&w$Pj3K+9RI{|RBV-P{psk-Am=C#XqxuTc%WU?Agvt6}_>FExEVf{!*1&ZDG? 
z_Rc1gp)Tu;tZ4^(zwMMR12UrQX1*<4v{<9{@=$xIsfQdPg`?73pb`PKjc=yPNVaP- zV_h5R0{xbT))`-=pJi>$3i1qaaz6ciGG4I%63P**tLq7M3wFws`@U1QGjCL{kGXo? zd|RZg+wOFMT|z@!M@#3r9(z2mAy)TW3UPX#n;Mgfw)XZ#9j%?lAsG)^AYIMd9C*h)L;f)G6Dw^7R9OPJ#guPC& zJp~&98vX9*G>r459H=R?qb9P(RCM)+cFN_U9#ujEXEJs- zgqm($9%?0JbLxxm#l}*4=B#22X>G+^EJ|E)0(xicSElmPSxu!3<9?}IXr)YFBcAGc zDJz{BLe98ApeJLtoTabNFs@CNQ_>@Gc2PH`;wj%vO}}eHgDR4P1En9a)-4H{;m#HZ zbH8clDHQ-115PVTy1I}OjILB&E-9a73lHba&O zA>|>FKMZ4$3nK&|4XJwUb;n*Pj3zY$gy=hHb3b|Aw9hd9hZCb<`(^2`j4xfhnN{*p z*Es1plb|*mPg}ugo4$XJ_CmNqjSbyxX2`+DP^x8%dKRfAAL{YEj9+Li%m)*~_>n+O zDik!R?`&(SV!5=NnLOsphZCy-B<9|vyf+MP#-?K+%X{TPj z(AlK5&{;$M#0rgFR-?z^9tRrE1U8~hj2>j;yH0ycdXlXTxu8@Q5_WBIliAjRO#A{Z z)|`qI*wF&9?h!ucJ!zkC1TsT3IGH&$ss1rX_3AyFlneUIP>%wTZZ||!p-HQ~`k8P& z?V+_HC4LCyF- z@~)ez7hw{H%uuQDL~wGSQl_2S#k&o<=8quQg?6iCkaSzFrQQ@5t7f0^cTQVMJG=Uf zbt$OOIka`jxl*O(!xX7QKvdG*-PG3@TH9szHTGyG`9G-7rjiOT7D#RICQa}gjiup` z%%>M64ftp?tA@vJBf}Ny)r~YVoH)g0>d1Bl{o3w`v%DCeu~Wh6070GV$1VsksA`ry zIV!Fm`tdXR)hJl4vzLWhnUIO;20gL_vN9kZsTIflLWA)0~g9D2*p*e50iraHkP&ZbhS72 z8UH5lg5UOnsat?1eau!n_}tY8b?uma)vOj8Vcb5hw!H^Zrg#!#;y4}`H}#nIi2TF2 z`nGN}($ua=1M^?6AxlC%Jxw=<8qnHh(tTHsf67|i+0?SQZB2JmcVCHN^ho)3?){Qh z!(H)SUzhi)?#^c9m2OIePLaF0sa+ARZ)C`iU6K)za!9V*`NeT{9pR>qmiACfl7KU) zYRxW{KAiOPX;P;>ouq21wrN~#sC#W&2dkVDWvtCw)MLMNc2}?fGJUIPq|Cz0Jk^m< zcV87;iEvXL&7smx#0eON38PD2)MA|Oiq&I8r16HtYG~9ViD+6YPsa6GLa^GDFjXVu zwvx5gxS1A}h1NuFzB$zGwB-8ogIJ?VTcpv>4QBc1GGUoKWp)>$D4RyuGyxY29o! z?v*qh&1<`i>9j<1nv+JQ?y*zn8nU<=Mu*YX)6*q9_c;b4tZLI-gPyjT4C-3r>eO>% z$S5%ydqUyHR)ubj0(g2YY4w0Lb~GVBuWJl-+}_sR*&*wNVO*7#0zs-5Ri-KQ5iZJr z7rhxkvIN*)1O`fg&J%csnh{H!Pdd~2=t_YX2unf6*}SO-8A|}r66rE3jKO&Z2RYh0-3dl~ zZ|>}Xg*Q);f>uL*-3dn4fz%RC`(B}NoTcl($=?sp*-^Iicf~(N9j^55qUm*OdmTEM ze@U>li3LdZ6@-hC=C5z#Xq=W{gNpP93wvj*VD`2xyF|V>&gdzgK?YE|vs=Ew-7r1S zE(FOH>J4^A%wT70u)C?_W+~AA*3={Ny`bRO$&9s;9y6#@1)G9vgaeXd+Cv>To8jQ) z1;LI^GdN?`6@`U82IL}4C0!vZX>pRM0vMX+aIi;&WIJBkY`3AMGt|>@u^H@%bcyar zC1a?A>)NORr0)t|a7|D)Hd6f&{w=6VUK66>W(YFXlh&61lVzvvFwD|5Nq?YDM&qJo z3q&PSv1oC*#I?3Z(DexYQd<`)daXUCP8#<&CvJYT(9+<=GkPw*R>}!=MBoM44(e`O z3n77m^K1*DkQ3H=R%+OGNyAi8I+#d@Qn49HV!^l16A%qlS$Ys@V_T@bg|)RwNtlMg zA_m7po$c*yT|I4l;oF)v-W7AJ-bFYTw2_xv!7xeV{5E$qu|^q2X;42n_1WzWnwo&}wFr`=9JDf#Az!fAmacxm@1`CUf@<*T&Y z-Ops2a8pn4;#sq1U99__j4xTRNasm*UyV)B1rIlDWZaV93WD7s$egTIjg2jB-HnYS zcC<=sX_Atrq>eN^n-{z}WU{6(R4gECL;05&PWwAsTh-dSrp@dLx3w~I%oUzRgl%na z6172kS+hzjq#Q}t)ZxsZU|SE(Q;IXb*pn)#33CsKl%h%hl|0Q&GR18&AM6O-jAV9u zCX=5^`kWVB)6|k|s_I%Qos;*ZDGg(t2*j#TyJx3J zo2(y|5{l7O_3AySDo>_qM`&(Qic?oYi45vko!nLbDGH_U%~G41_A(xwT}`(|6pr2u znQaO-irPZxFFbuD6r9mg5DfKpv6m2P2^Px4Q2W0&lpD>?#!j`c*lR!vh$rjQ*!k>r>?ClIJWc47P@;l}Eg@hI-m(A*kuC_L8s-|&kfJI*kf2bo zrA^jfSzGf_E;irF=O#1Q*BMb?1^S^`H_r;LZR?fJN4i`Y*yg4V*_>gelLbCGW86fN zc>O9U8C(bF+Y*GiSE^931v!@GUGLs7XB~QLV-Yhl*xAwECzH~I>kqE%EmSzf@8yQs z*%?G|>7x}6-%7UyE2OuiQ0_ZEjV|vKq`3Y#lcoI4h(b((UMCn>&QGfsP=Y2P(Zz= zt)(R-v|iz8Yeb~gG_*`dT^2vVQ%ziof{~t35Om>HHw5&YC5?eoZEKYh`xHtyccO%9 zHU%jh8YC@Dr$Qt9qxz8S5xx6w*`Fj}JA*x}zDeF8#il8G-O4(4Ye(lgW?Q?>Si_fT zVhuTuuqZ?B1lEcXCl$LE5Rx^0Dw0)Ob2SWIiy595gjCPJCfFx#E zT+?}T1d`*_$P{gXZ81b21lxU_EZ zVjhj)Mc6APX{uB>qeqz1RGh@-7|EwBCK$}YP!1s5X?`q2r?4$HQ64cA9k}+t(CI1%*H~5VCD#1 zJ#ZXtaH4`I&iBr)F+bbiQ=;6+#yk0?{A=L@I>Ec-TK6~hcSHx7Dtcl1Tl?4{OL^01 zCWK-Ng7!iuuu$m)hH65~QpRjYD|D+Xl_Y1MUgkK%KUQMHxVR ziUHIZ&iu3orKv?)n8FFDu|#P0S=#2JLkf*l>qPKU;<`hWG~aBO*~6}?thXbJP1Pdh zrub1C&zqcmaE1L+w}ZD%fd~up6gg{w9}o_PHOO2SVk)aeW?iR)>F`mS3vB{<+RZk& z2H^oEl{DREoyTZ{5`&WUyV{uQyG%aCJ*$~IPiCXwp78CC;ONSC(!SZ*7%?_k1Vxan zb^ftn^Ivb5l24q50aNJO-^|;&`eDOUD67^TI!Ke0U+oSt%Ow$rujb~h+8BUUTuaVs ztxgt^$E*oD)U7yw&Do=3 
zN0z1@sf)4gY-Y_OldV79r{hAE8t9)HF17$M{HhO8<1%)@0#zmT#-07r< zF-Lc$CixcqD5+Yv?1J^n=5>VDv6RR>66N4$wdn4YGBRFst8Oia z-4DfaJ2J*7DOKhKi$JP_&;(6p98TT+DeYV8!~bsYYrvx_&ixm~7FViKX~jzIieQ3< zc=mkl+4Hd*NPwV$L=prPnuH`kG>|kK6ezW5(W0WomMT`XwB=e_QMp%YK~b@_N-HW= zs#LL3#fp|{v{X^K_Wyg|nK@@SfFJj{_j&&R=l-L^o|$vbe7y6{`#p1d*HBo85P77; z)9cZi(`sC52*ZTz)Gj<~OcgV+n$vPFtQRfpEs^*a(g0wYqS;E%kW4@UC=Z&DpQN+l z?A-j>(_w$$HBZD;2%}h{`E3iilGUm60(voPnF>>%*(n8@~n7Ec^J@RPJrKzY6p<Ka;|oz{o3e~AL|Fs#2d+`nnO}e^MOIc!ZR8h(S=mT* zUlfG*WrNXuQ9$=+V7*ZDd{1R$-zclb6yBEw>RmOy@V+ci_tjX#`?3J{JG){1LQ~wf z%AkC9)4Zr$WpJOgdcL7-ILesAv<&qeYW_;j92WTqdMPQ)D~7gsEMP_I5)})B72G*` zX~HZ`II$cN(1y_4z@A;|d*aL(Xp6Wh+z;Xw(IR?->A8G5l;Z`FS7!d$jJ7UvDs{~a zRfdSDGhIYG;uV$xdoc}Z%)~PnPD2{n-aZ$T6Mwu98Otj|0rP{wPWn>#5Q96&i^_o3A zjyM*w$CHU%crJu#f;nB>>P z^0up?J-#X?1+tauo!Q(hWgwb5-~~p>Fp-_XjCZw?fg;xd97AWx49#n~6K zBy77o5Z0(vs2DIN8aL)d(1Hamtf)coBzrYC2WTrP9u{e|2_c92%JU`a3=K^vS28;; zV|lY)pym_gF@}Wrr*1B}f*B_T)nOxaM2`4jX;w@q(SsDl;t{oDpSF}JH3t~6O8jH^v>V$KA@ z%|NgZBpq@90yaV^B85VV(rf|fHD*HmHiU_ZWyiF)048*w!A^6Z2dfe_7rpIR3<*3t z7S4HE1d=X=7|Hk}C`9Cb09Ao3PlI5g7TXG74x0@byQP)FVaYf~`t@uCw9rFh&OOkZ zQX(bN7l76P-ubf$r$nnl@yNvzdHqqSD<6UWg2h2j`+N&7!q6!6fXyLFx+HTa z@3Hag;W~snM=Un+aOAa;*n+;43wM|NRBYZoF;M4SS1hfl05Z%z(@zfq@w$rH)7 z{{JBQ#= zV(Zc)C|4@U<){+@NC=II#atYlSl>0DP#>L_UJ70jvL{?QZdK5e%7#uu4qKLX^}@1v z}QQjZu~sG=B6Q#cN2JC+mfbTQZ9oll7lxsAzI@nLBgDE zB>S(Q?@=xZ{Xvt5+lwA5NRdhUL#mC{%+R(7r8U^pV%o5j-SW2gj9=^{4}$S7m!| zLJ!Hw3J%Ubb28NB;YBg8W#u8ky*!#mtM4HDOToKj>&3h9@%FB!unqZ~*u^O27-E?u zt*`*Ba3U`}A!J_%iIiBIB%n&uRvwKgB*G;3f-?w)70YW3vEFvEhT2?O2;zB!ImJq9=CDnY5PXh{02f&GL3MI; zPxTt@3v*&VP|oJ@ogLb2B;fPjQMY&)w2sLBf`B0viY086D2ILto=Eo>&T$0_#iU@! zq9r301R|^mb~uVhXeG@>OsWhu$uoE=m{OZuj9;bb(I|z3q+ooiuWH`%C>(SH@p8VD zuGGlY9#?0%eJ@Ge`i%@`tIe9bqX%SS3#e*&+&CI1JnuqHHa1S5PKWlCqoH$=o*| zZYMk<8vw6|yMsD~m~ISKi~<-Wp`ch7T2*~TvrRyI5uzj*ZT`HGIq$-0ho86JczzH+ zrxY>PbA^M<%r2P8w>%D zUO}bRFU3+)DRjL83LT5((^E-0VQ(eU&S80 z;iO@~@`y1e?I>gD(Y0+x*W&rmlFTp4_X-N-`vqmPo`N!ETb@u~UD;4Q;({sY2Vak` z!G0lmJ*K*5!4FS;mS6u;-;0&lsd0$?#d1? 
zL@&}23gf0#r>RfMUU*4IJKT8$Md3ptz_Pps`a+67c3Y5cix_R9V+i|$@kCL|MDZ}T zZ$U#(gAvb`mEN;m6SN0OjcdKej3JbRi$(``BPid59c< zt)yqj3md$+$Jer6zLs_JHPt8W;8c%v6>y!(EfAIC#qb%tNd2RR=@)gx*smelq%5y} zWa%C1CO;Qes`BIzK)qClxJSK{&(b~Fj<`p?&AW#|MepIDT&jcp4Ulmd@nBb)frS~W zhN=`IM=@b&F=Qe7k&nsS)CQNcFNgmrCZ@^gAbm+;EJd-vY$4JsY%Y#{(tU-G!073; z=S`_cf)WwTgzo-dzb^<yd?&9^(L_Zn`BpDKAe#8^^#5V)MZPYT!S7*xU9@ce7yAsAJCNyz%0vbPK!+(e zuM=VSoC7S5P>|G;5>OOUr6DaAJP)%5cqin)s6NIB*v5vl>?)SuCsqsp6Shf+hYRWq zupw;-aPQ;m2k{aFO<*46>4WutQnNyXUv56))#oLOY0%_V6Ys%jnV2_ko(FD4F2O_z z_Q18b&m)B!zvRM*kw($m=~k>_O3@8$ej#QKmj?{bDx9BvZi>U^Xdjf?v@(=@5P5N@ zTtCkQlu>dtp-NaqWbg#TM~sbFFU>^P@a~S`1^1NAC^KvAB$K+mh+L)k~5Pm5~ESi^OY6i@O5wkYEdR%f#Li`X^f#olZ5 zf$sR?g_0Y}@8lcQ}hk7s>KA1nJ0$lmxh_LBN9lbyx;X!hz!g)kp$|exO3h&7NQ(MZ8(9$jr8)#|gt@8zXe4Y&~ zM)n!97pOVN*XzUKpk4_BoKqn5RjA#`}Ua3x{m#e{p*fGhZJb9jRLIwOiU>)F;gxR3V3>=Xw6nQGP)HO9@MbpVNB)AYCB!NZnTarJT>1-BQ{wRtA zc9V9{L~f*kHxY~W6pSlC?m+J8j5}o&MN=a5od8FB$0cnQ;p%8nfs25Y2Ls8mXnbOy zv%)u^0BnpoRS)7Vgd0yG3W1FV5rH$TDi%$d3^STe_FZ0&Jg=W2JT*pd5zzAM6p^d{ zJu#m&?W(>SND#vI#y~F|2ThVRSBsGlB$XJ}3>y;d?Pb}Qga<)(V6<(JeoMj^MK2lz z;xOM^o zo{=>*)kGL^$=9PsjE{|+Qa_6G^cyO=LX)njs2VXrS^)DdV-+>kH4`R`8Xc_|Q_wc|SKUS1W5?Ikj~JgfdX$|zYKp3`>f8v}Rl=?uzKo8JpQs+BarN#vm(^5G z#7sr)&~r88$JP_@#4J|Tj;J104?GR?qsE8V6GqidtgeqS(olU9#^l|rn-D=A_v$WC zuf?h+jjvRhv49vs55KT73Pi!Ruz;dYTmy3>ghd_!y&~2?*`83wXh!u^j~G7&*u{&a zx&l{Xp~R{y>ZjC>Qn^^Hdd!5{=q~cK1b7vRYt;cNFX#aJL}lUgtOklQ?u4qNcEp4c zf7uC8TDK>twKaSVA{{q&E_7p`zBtgfx7!y;t5%mb0Pw&3zM4p$U-=9mNVCBAqmhYzQmtWiXos5*8?LTpP}4uMF78Iyf$BUQVL+g- zy+9%IOc((j{8B}|z!R%wVi@%!A2h1;Fd&H47J@+LF~eML=-eJ(W2WVARL1b&Qo4%D zc{E&3S6nWLA^*$ykN~O+D#h3io&uEd!p1-HnP^I4wYbLO;D5PZT%q^;ugat6$lIpM zJCt>zy12I}3j4~7Lqdl7#KTl0lzB8lB?3aS3=L74Xi1iZ@Ai718>KSUDxR;as+mw9 zOP2to_{l57 zIH@cgCuJ~B@sszQ!$l0$TDr;OQy1zv1_m=zDyiiwYJkcmuJJWBKxJ~y_&R!*uA=uv zLsX_9Sqn}{fD}Kn?ZTyr56v^2!nN3Isp38XHM|`=kC*bHq=ZTXP8ILqoev{p&BQ(A*MR)6O;UKk|T*mN*H(^ z(-IX6zD#B0!cyXodYH;FGhsPBO=Xywuq@&{qHcJ~SVd5dP=+ zt+3}(ICeQ}4xtfFH(U_?qPtGKl4lTysqsDt_%=|WIJpbAOTF@FO9 z+!uS!c^C+C!C=UaMq>(Fa-)`p=Ag4Qb0tDO<}C>mIC_{gL9Hw<$Dr(5>X%RD!ad$S zj9(Et1UVl15$UrSH|caS#HvHhp@~0Y1u)=Y&tKP2llT(mRC_Ry-CxKa^Apr}M0q@}@n5)2fRLw?V8mrLhaxr0@fi6iksW0;YMoK+3%4`}| z>~OyvI4%`L!eKZkXzPnO;za6?L!0TpgWG735iMlJ5Qlc)q|7|K26YKd#~Lacuyq8n zE)vEKN_obMn8Sw3QKLsdCU2ml-6&sIUx|2=YQCyTe|uE@t;!}kD@ zZ#+-DU-T}%4TyZFF7oAX;X8rI#|)8g{C%II?Z}sp6ZLO-4&VGke%lXad%N&0LF9{W z6!n**{T}33;dyF*#S?vs4j{j|6yKUfejwiOcNf|lE$;Wc-KS^(@{7)q{d*eUOGAEJ zRjjBlp6`4f-xEZ>_9k(E&r|qbBJx_D$nXCP;DY=*jECyiUg%R)P0!2z&f0@-BqG1L zR*YxX>-eT2@|_Z1Yo6>=)Q)^?wRnHqqxjw-@!eipa0JO}4jJ%mJDT|JGw( z5>JczsqhiXKN;nBAm8{zl;4Z|uB}o20P;P*7vrV(``?Q3{4Tmb2>C^M`BKVnj_y|> zUwdzqpNf3r9Z|j=`OX)kd^hrmXQTW=V z`C0V-I^?yy{AT29Z;$S8MP9+1>fesMf-mKF(*0W_c zp5UFxTuIcFIGdDC@qs6}7%>-!NJlt&*c0x$8V9>M=ETMR1JpPtdPBONWH@?`&WFO0 zxn=P<0-Zl`;8LKB2HVsNr12RDXYN3#DD3_i2=9XFf|WIQhz7{e4Z$QFf`TReHYE;f z3Zf&SW?Vw;pxHdQ1)K~A>vGgt>QRV9l%d;VCrf_S4C&(W6UU7lH31GIVfh#DLDZ0v zD4&y778_GPGQu}F!$d&7@+NVZIU^8*JA?>rR4I$&D-f;N-gv197$;G)_xmW5VdBHX zF!9nj&S?S+IOw{h18yXC%*w;;hLKNMoUSKiN~|pt-|Wm=7t{+x&&}L`R0$wQn~mWEi^EBPt5hbPh0a`1iY&DOA{B-;nVGHa1m7` zaKbot@X;>fu_EjrQYbm_;+K-#3URf)qeC2W#{gx&SE2+<9dxf{m<+~4|7a^JdW}t4 z8~`}0wXG{I4EO2o^wLiuGtTR(I=nNqRwirV4x z(hvw0HN>!xK3L#d6NXBgv}ob;!8?4wg4``l&kD{Y8kgr0@q@IsIiTf|*7o^vj2!-O z2H+el5kX@L{`3y`d*s;tIK_=3cJRve>pWBi_6W z*R@EyhJhC&<@2^8$aDjd1|d=T;+yd;V5GeD8Zu)5LD5youtBd8s=|I;+WSx2sj)D? 
zSn+YX0!rD`n^_Z|OBClvXs~OX;mkX(=_(foYwBuB(D!?JHSm{ULcwnBG6yaS$gh&kXjX< zeLtWZG9KC{9ctBA3+ImJs5WtM8I{xDs-oz7vaxa@B{rgtB3aKs@fiQRT(%m#uZ4br0huKK*nyA5DO_yV22Mv|6qRT=xoCh$)oG0q7rcN*3#OI_*sS7j@JbijA?U{ufa_D-SJLi`S zL$k%`1@FEi45tHtMK}^-biJdFyN66S^vA5M{yfM^b4Wcxvf62vJI53Z)d?Q+Cd#$XlF zw1uJ(_$wK&^7mfQ3=savauF9)G8mDqgTXj4@It0k40>9In=C%zmq{k|7&N+9#X=_L z1ur-sDhm+{*Su*(aAOL$Bm$p{LuLZ6rlWv)!!w3^VeTqtuOr%Pr$w)0ook)w2mLsi2+JdwbsRyawde~Eth9H?pl}PnSElBf_mLRP} zT8p#+X$#U0q&-Ockox@+?;{OCGLb5g>XBNI<{>RcT86X=X&urdNIQ`BAnix$doSKc zN+4-SK2jA@Ez(q^SxEDcmLjb{T8*?0X*1F`q@756kPaXf{R;IUX-MTrwMZ>U^N^My ztwGv=v=wOw(jKJ!Nc}c|_ac=b`AF4BjYyqHi;$Khtwq{|v<+!D(gCCa8!>JqAE_3p z5vd(%A<|N$l}KxmHY06A+J&?aso#C*Cz69yg)|kZ6KNsRQl!;L>yfq~?L_K9irtSs zB9$QdNVQ0_kQN~=M_P@v9_bOJ?MOXHeK(n#a$tXJ!`TlujjVMEwJEERzmt`m9J=cx0qmifeuAal?L4N{I*-%E9 zfkgyeSV{=OeFU+JQ3+lT;qE~oUs=4KoJhiW7OkGV8SlpTap|O#9PJ;NAu4p^C9G+~ z%Sg%x1TOw*=$IiR5%Wt}rRC3B#CjkzcfvQKY}-}vP1GXMV8V4q_8LfQBEBEuAD#7; zzq5u`U4eK@5)@u485So|4PzHT=CPx3u~AzF5Y z;474yxPT_+45VJQj>;iz!lWq9+JK%CM$ke?M=6=0iNGNPiwA^$wn0e~-!PnZh6cJW z#XKY`2=&>L$Jf~lf3_K41lgDwO&i;Uy-0Dv+QM@RoCBN0Xi31869M502N>Ey{8fTWs0 zC=0wTm8dk1P~>AfSa(y15yux^+B#u#Q!&OcXC?55M=x}z(UtgP^8qVhwBDilfUGxx zLd;}yoiI~U#o^0sE%U}1>Yhe(k5q8l&z?Rnd%vPMWz{95v33TIQVihoUdV+23 zMBU2b6`)FysYt8 zKg2Oqy;|@DQDdnIA`z`vEOtJSEH1W=z$6aJ$tRKF=t42NUX=)Ph4n^Xf00ol@e-VI z#r@xYJp6dHA;TIeVW_4{#T;8k+VnIvSKNk^Ap=*l3f z7?B_Vpy}Z$HFT&N&dm`o0m(S(2)_ik)_JgY#f9ZUG$#4k!PO9`g(43dJx)?7A$dip zy{-6UJ=r>`MzD^VT}`ubKr0AUTRYiCX{E6fh8_SZVfTxqC0+Q~H^X8=&B)jY!Mezt z5^`CX<9hAe2#+vT#yJ|Epniq+VUQ@H%OCWHq7;%R6#CR8!Eh6ejl>#@C*#baVU3N7 zv4TaSdy^Tcii1TJV61K3rNSHl7^^Di8x{xE!E}iQgT&-wu2zkMnvlLSMI34MfS{C! za~^5uC&jPih-(R>rt%9)BPXV#0%ocyQhH?{C>TxDBRY_04)qCLmNEkaKI~PKGSG-~ zG=y6&F$e3h9`h_sYCwwo({Kx&ZDOK94ag{rGtHU23d?k4f~Ah)_&GXvQ*uO*&0K;{ zrOc~Zgd*|k5)wBMF8tt@;_-qz2oOo93^7;6Kr%sLoW$?h<|l|$T#N&Qnxu|N3K^*k zl~KO4ne-_*>1@lv$R@;r2oP}+3P@fVi$n5&>DZlu4M&ovu`3om=~X8k!i3;3_-kgh zbpjRQw1h&l7mK0Nq>i;C%#)$k#K;XukzPZs;5NMt_8DiDA|C(+%O3B@wT`QcTRTC6 zXhjPM^U@%7X?km%VMp}7iHQaY#&8Fr-H~MW0%pzyNaZmqXr29rWPOyBUXdonN+ik@ z(hZz^N8fgX`UW&ay9Bq;t-_Qc7TeJUyTPe~m+ite@w7eoO~|qyS+*a)Jzo*`V>$6V z5WlNrSwfaM_+2+h+%K19lkwXr%Nk|bJp5|1Y>_Nmf#04}#Jj6y*#`WslVzJ^*;f2E z%Ca4@Y!7~mWZ6DhMq}yy7s^c}N;6ZC^TksU@^VeNF6g5E37_q0B zIV8D8L*}c5B*HRDD8niQJ7(OCN7S>pllLiw8(Z;tJF(WJpC?osBwbbF6G=Rmv^AlR zVc&^AjO8#tsKeqTIAP;*u!=}S_TsI1@d#Df%~OEWgpd+e%IU-sh1|hS3qwdUgi?JE z(wkt3=7ZcMCj^Hl4KQTPytt%d$K+pZ;fKtvzDot{r8bm+qO28i4U*Yc$_rv}1aDaC ziT}5C#}(hF8p3_%;=xkV4%q7v@^idPRGqdS>^Fgg9>DpZaKbdTOHV5` zNxX!Ba%hKX-uukWxDt++Xln*L!FN7$AVK7IjG}^=xrGi~sMdK6pdW(Q@dBVVm~SIk z13OBB3h|ftm$He7{Ac;MkF>w<`skDE_#`{wr)jqnea#=o%I|>Y$QOJ5cyJ%K9imLN zv#P*uQ)mxT&xF4pYby<(`b-!3<5&n=j`D}BI$6^APg&b%8VBm!{|WZmPuXnvSkaF} ze+S9m68US&U-jTbJ|mCB%TNdtim+OjVqpnMtW;|JLaLI~MW~o`90zG_XOrnkS{9fz zuo-AB*93M&GYDSluh)dCVX>Fg@uSf5DF?IN=9Zd{9oX4bK||O0}xC(vTGPr z=EDbffshuWE>^VaTHM2LEq<3R#k2VBL0WeW%8KAa!*2p9c6F?1 z^Kj7VGWfKX#EQ0^06Fn*D}Jlx@Z{ok&G{Q13B#_KZTfZ3dlJaSz+1ZCkS=vMPn=Dbb(gSDlA>e(}e| z_>ztOzw#HJ^HHLNQ@e9 zgvX@yVbu#l#ZhdCe#BSn$%d*xzudI+vX# z+TJxeR`hA(707>C3;)Vw8JCp5uC{zVq7OYKhi;SlQU3&d(Qg*sSL15L-VIt;YhHx^ z9d)ci+I&Xjhd$Ie4p-KR{RM}+*Sox69GD+P=gLQY`Z}miH4ah#C&+=lJMrI;uS1+E z#k-P;fYdm!Sda!8-41Cf4CI%FXQ94S#94`p%tVs^MVd^p_^eGB*~=ffA=IZ9Xo1VG zvC0r3m`%d(0pc0~jZo{9^3WH2L<5~a9CS7k9R3bIwD;p--}C)j$YHO;;j>k4%Y}v+q<0Im;iP(|a z25FW)snRezcS-Y95@MS1TI>i;!YfMnu5h0E+&4x$71cAJ=W9`ZEML=G{bI-PwRrca zyle7A3tx0U_CBj?0S!?Pd6%gjO4Jke%#nPp{+YW%UH$*x&klh;F``>YiwfWE!w}aF zQGPTgqzVr=CXR^RQ1}w({Or8hI22HP&?G2MmTAR6S7=P&Z$a_I`q9H390JL4{&1GG z2F3GRbH&-r7ves=sSpJq-lBpzxu6*PYup@|;q0c|aO@xG>cBTDx)Ibk+il9tF1sWN 
z;~PAOGZ3hHetv6I)!C{pWDe2C|J6m{aEq`Y`)FrrP1;KBC)yhAE^WPbaKEy2^vjL)#u4T*=9kQ`n1NYgjx}q{iROjobhFLu zG%qzTGp{t4n#;|V=4x}DxzT*ke8hax++pr9_nZB!37n{ zyQjFNZraVbEpD58z58SLLH8K1$UEIjdez<}@2}ol-g{ogALrNm*ZAM}SNV7N>;2#O zPx{;a=lxy&U;VxQJN|zEBfoDD4~l{>1%raYK}j$o7!$Mwoxy@&S#V?UZ^2!`J;ASo z$AUiwuLN%g{|dg4IVCeJlggZ%nUJ|4Gdq*Z%+Fk#xh=Cg^Q+9}%x^P~XLe>@&ipge zFFPoEde+H~%TCYE%U+)SPWH#yHQ9C9UuN&iK1ByyCGdSK!2c-i3)&a8Q?!IuqK(le zVqYuNeYE~v{YU!a`WK9PV}>!?xXQTB z*kt_1_`R`%XXQk*+gxb=0IqvZh&G z);w#8b-lI0`mMFqdfIx<+G+jM`oQ{^Rbp$lZI|2Q?XTH$>|5+7>=*2p?KkYtr@oLn zC3RLRo2p8UPt8v)N!^4QJIk@0I_CoCB4=8rQkpber7{ z_cHfd_j~StxKF!pxqZB|Jj6 zGiMO2K!8dBtD)La$pOhzftNdyFDKtfUZQ_Pzf!+bzenGwKcsKd-_YODPd0`bnvpUl z7*mXH<1%B3@jc@vW3}-MW1X?Vc*EFdd}JJH7MTOh)68OXxH-n0X3j9%%`S71d5!s9 z^Dc9f`7rSHS@RY1b@MIrSnEXV%a&%1vsPILtiE=CdjKHg*%kII`%-(ceU1HyU6is? zZfay|LaH;hAhjrUW$L=ry3}*2H&UlKXFH8fhqKE0H)oymlJl-}z&SeYrYEK6q;E(M za;E?)PrAcB-E+MTZd&m$?r(wk5Lz zIJPUZJF_RVH`9~Zm)V~=kSWei$;QeN!v#2KnJ+Qk^aajzS}Uwut$(x5 zw!dj#VK1}q!iZn8-?aZ>e_+Q`r>4#TCe)=aNPR!GI&}v|{c37&>PY8!=M?8G$8f5g z%bXuOcQ~7z?ap7FbJCr_j@{|L?p45so88Us6TpY(-67tYp5|q|@!oWAp|{-I;630y z?j7YH14wxO2!FNzYyUC-kN&&<0sq9{Mx*U7{5wSC$L+MkoVF}73npX*;SPB+dns*Gx*7BpkB zG1X`^T8vpnJJ!V=#zxSNuVDo&G^bdztl6N>3#}`x`>cnUB0O&muurwe*xmNc_JY(6 zSnWSfJ)PQ-dO7uFQ0K*<%s&EV?{Ge!9-OX8PfO2E&rg3VeP#N_^vd*4()Xk{rXNZ_ zp8iAnPwDs5E$(;S5#9vvVz1L%;4SgK>)q(x<~`~?>AmXh_3p*IUJ+a!+!?$O91Z%~ zn(5B`Ec0OIZ<#Tmn!n1%I$<-$5HHl`gGT;I+pWE?>B))7%aXrJZcT1WZU;@+ncRg@ z?@8`W_9XWu`|15bF$aKV4$>3)5WPe%)ivGJ9n5pNUa42<)q1U7udmkc(C^n@*S};` zf}Y-DY%%_6oNiWwi~R!h^JuHwT43F6y=*1zcKc`ccKcuUaj76xnVOhtN#(H8ccflR zy_+g>zUIt!7CJXN&p5}Y%hQ$Vs&sX_HeH{doSvF)Ot++GrQ3o1-RXJhh3Q3@>#gZ+ z>Fwzq>7D6aSP?I}qrkW3c{BZ^GG=B@=IYFMGo{(BS(3;GVNHKoYu6sdybc3*UXomz zT$Ws(ykD<3r<$|OCFXyae>VSNe#!EzI$+{@Yn!#tI@um&Uu4g>Z?xCh524>X?47{A z-Qc);?H+rdz281yXHuh5jTpzQR6A(b@0{12FQ<+4E$N?O)}F?!?E;^j04g;DSow-O z*emnqdY5~@_5R?U;%k1!ALWnt>--D-hyBO>9pI^d#|r(>KQd?vE)M1eR|VIBhu#wW zH25$$Dsw_6ktxn-87mWHF3Ws7b6sXFbIh%o*E8>Bj?SKxJtM1S$7k!Z7i3$q7iZ^Y z7iO=@UYETodmC2t-Pw)V&Dq~)|B(H2c2D-5?E8fG%U}lqY>w38SmUQ?L$y-P)&lU+ z@!+HlSPh-p0_}3`TJ3wlnV)KRLcV!W`>pmA=)f-RHSKNSP~T*~WdCFl_{zZKpkyLB zBw3OyO=?Lq=_LJRd9o7nP<664Sr52RO*R7RXBzcpyY+Rz`FiVA`*gbm{9v>_)ouo~ zFR{O2e;W|L&wki`8a#7ADw(oVnbh>u%+xo4E8k1~Fm-q8-qZuBM^f8Udm!ihE7jNO z?-VGA10tgrd$%hFe2U2Omr{R3vF zCmnN-bx(8$x(WA8*MT%N-kt1T;jVCRckgi5xf|R+xx3ug+&A5Sx*xblc&9=>Itz5R z!aLXdn%9c8c)j<1?^f?2?-B5xzj&{CC-^4jb%y^P|0e&Jpt?_j>b?MZX^)hXP7TU} zk-^kpG1lt!!OGyF;L*U&jLcMJF3K#*T!~rzRc2G>;mi}6H!^QwUW>9{%zh<1I$NEc zfO%b>y(7CR`)c+L%pm4(6?6#9-$v~{tt2@qc@@_3p5#g3R9EN^>a&ciu@<))+l=jy zZFU;FjNQf_W3SNz9zMW4**w)OHLnK8{(*U``HuNH>tw4M63xFumN~*6WS?!DwhuY8 z-Tu10z&;UkP?Z{&nw08Db*C1let>!SNor&2q14gNvCcrJ+L_>7@BG5~mGh|cG$hR) z=a}@D)2CwvpO>DJo{5#)onDdtQTq1uo$2+UY7eLPq~A)voBmI5vM;+|b&DbCq};4K z$6e@t+r7(u-F@3F#%eJ<&pXd+_b&0i?Oo^H<=x}m?``+~1W5nGyU4%Xzu!MQFatl> zjr9@F41iR50VvOo3(qv{kLdO1Iv4W;dU|$S+XwKoV*lr{Z+{qlg;|g`V;z2{YYb= zF~k@Pu5h*Smhrw(WR{qBn48UQ;QnubyTz;%tbtYr+~85`aqAiDL~u2d0+!m3f&2f< zJ|;CFbz!O*FkPIwC3U~^d*=)369Bm(=?l}%pd&v{KapPS{?(0pgS~HhS9sSz-n-X( z(0k5%+OG=M1@{L}V(j~akAlx=0mZ^i9__7GdQQ+C!q-<}B^=!Tqj zi~S3G19XicsbQ(Hpm?)WU8#Ag%h2=JQ$4BoQy-;1=Zt1dxDhMx4d-pAPujt{s|N-& zVbv{5--}iEOgip%x|g^=agXwj@lu}eorC#Z?ETbRY`=CuizN?F!zL$l$@$4&Cht$aoqR8u z*2jY5P13vc`T7r_MckqPQom1s44CkY{(`<2((@6<7mW(Y3KNWG@WL)*xpABEQ%KGm zjbB6hdDD2;_=0(YIS6ud%Jj`9^I~Xx3m_3)YknUx^E2j)=KGL{K4*Q&8f+DV7frTW ztvS}E*0-#mS$A4HtUp7$IMR;W!!bL)eVx4=v+@(n%7ga1_J7(ZVTE1>>3Jz+$=#Tf z1F3$_mz}xJH=Qe;>zr4dFQ!j}bg?+S6nJ%e`mXeo>1We_PX9GM!fkL{-Pw>MKXUtd z$3b?w95ijUcL(O;L+@xm;TQX5zUAKt?P4|bz-Ro|p#AOhPlvvzVNSXsUtbd}5AF#5 
z9t;BpE`~<;9#&l6?0Isny#P7uAK4FR9(Lgjd(6WS=*_-Xp`D|h2U)vOo2C5#I`q%9 zyRqJmO_GF}OMZiO=-ZNOllLU=gBCqVKSLj`H|w+XPJJP0++MxSxWxF5ahGwAali30 z;QJ5bL*opy*_>m}H?J^nFrPQyV+pgrRcfWJQP$6`yR3Vy$E-K482I_0?bqzTLnk{0 zIDKZS8q{rCY6kR=<*A>io&W^jOeG+L+s+8*H&~f_BqwP~Uj%BlG<|>ivGkkie?U(= z#y!bB!!2_?ccgnhV0jrh$M4KO z_bZ^co#)>UsNMtZ`Z<56zuPYjjKGB^JuWyum>#qT3xfy1CEf_$3f>FiSm!5Y(x7n* zGnYfk*_e3|ynlH1_UxV6_1XKgZxM`pVzH@!@oCy{ZM0Uaov(cj()(a=W-XacPD{>a z*uIqfYx3_&ORv$VN|=6EU#b68zYAEsS$|f4SN~7Y2F| z{LXmNcow|mZP2`-=9#8$PB15#4Up@wNC>EmqXHT1a0}IeKj!nA^TBi$$zuofjuLZ(o*N8&QE;}TxKma!S$&PsZFWP zspFlKoN}kundnS|M)a8TvhxA7!Q;|rKtAkF-wcV6B<0a=HMqzY_a%1^xX631jairr zoW8~T2{eMf{s8|Jf13Yw|6Bf*{saD2|BN6NbObj;a^47Tye)VhmWqAB2f-x3_*Vv$j~<=S0sN93>~jO$+X~o{UiNz&=_VJ zcfyWPWR*b*uCto4l5etBTC1&Z+266hYu{ww23hiL$dP?hLtzuhrCv;pamHhOQ=OY2 z!)$QMKs_%^Pfxd{?*`x8l->uMz~>;bjKpev#NFop&Fyi=K<=&go`JRIH2(~&)4Bf7 zfJ3kQZ~LvmML}2aX7CTlkH=>og68`i^xju9f6si7IVpQ;c4)RVYiG-|RoNxkA7pRM zt_22tNMi)!pf!4gcC=QcB_R`LAU)?;7W}bxJM3Irv}d3}{9Sto5_NZS0qiTsW9+00 z{7T=aAJ9vUq>(np8Fj|1(2|cc6QFf7uqrPDrTZZ`3#JvgDlixsB)*)4tdsSXHIO?_DGH(T> zw`aT^-Y)M5|7ibs|4aUS_?>7Hc|F|Fl&XWt~2bV!IUyn8UNbvjM zso=SwZ|3OC@tHF-=Vi_Z2c8Yh>+;MJNFYCggnkcX^v5!P%sdZ!!`p!S8sO3Iz?ou+ zKC#7sv8hdgq`eND`DLvxIVJg>@^*jK{GSKQ#KAgRvSlvmADr8nXouUv6%NE#?h#pygX_;3bz>S3~Z; z5o_^T>kY^wo}FVY`+EDwuqobUKMuM3ulA9tlVO=Cfy`Hznhfc?2z>o~SYj4Bivaf} zuqiHc_B;Jxhgkq!XDx8^WcO6iKm#&#x%&-wy}J?i#7Et~z*cp@J>EMR@{8eRy+&`A zH{V<7{Q@$~)832T%dj#Y35!jcpYyK*1$+wD#W%q3ji4MhiUn-7Sp^H?Z-U#*x30^9&8ee%w^^(SdmF0-No`~5p0+ms8Oxe2+PzWYZ>fv z>#WVtcy~eei`hkX0=CO?yB4&$(_RGY%PQ!h;Os zNG;&Rt;lCOd^tJkWeS^M9e?;G+Z`HSBb?nr4>w6#%?9=yy`t&vW z8U2j`kj)1{IxjIw4byN8ANC2d;?%S4hGvdtZH2KC^RO1S%}qQLq$!b>M9?S56VwUf z1Z{#eL75;-&|LyM{R-IW*O+U~4X}G|!CKp4?u4a&FKqP(V6E>D>3@(_0_nrS`m2J5 zHQAa9J5f6<#|vQ>TnXr}w$@?>*29Xt9TMJ7%)@SLkF^)GvDjW>?@HGPjX_H=i{*et z&>NS+vOXwVlJ&Ebq0e+?7iX7dcVu^FcV%PS`ovZO=6#JKjGJs#^Dw^6|Km8em?5zY zw1%Lu3bu2C$U1X9LuE7k7X+DY=5~h8E^{|SsK?yLP>NZ7VO#HS6KS^CRtrP0)9Pj@F0vNG3cnPXygY>Hnh>U&fZLB)TVRdbb`X5`LDx9| zobPM*gI&4^mg#}eeAQ~zpaDCOvdisCy9)M+S@uHA$2P2jHIPVr*r&)YC9G0plUfd& z-D=pR_NV*9nqazB?f`F~H^eLPD#2l6yZVs1B}T0bVtSx)?Ui;l*4kvOwied2=3&h( zfquB$UWt{r25WCUY@d(VThQ_jSfF;>jZPzU!e#Ducfad{_fLlYw8UHQ4S<|p>o4-R z!IDJw24UBZW&37}vIDY->=0ncZrFbILMJ+qjrH`wF_HMw`fB~O{^0Wiw1KemCbS{Y zk4iO7Gc^Yidbw5!y|EfnWIb#`Qy~Y;gQsh&woNl(-KkG@CcBf1lPi= 3.4.2 + - chardet_ + - multidict_ + + Optionally you may install the cChardet_ and aiodns_ libraries (highly + recommended for sake of speed). + + .. _chardet: https://pypi.python.org/pypi/chardet + .. _aiodns: https://pypi.python.org/pypi/aiodns + .. _multidict: https://pypi.python.org/pypi/multidict + .. _cChardet: https://pypi.python.org/pypi/cchardet + + License + ------- + + ``aiohttp`` is offered under the Apache 2 license. 
+
+
+ Source code
+ ------------
+
+ The latest developer version is available in a github repository:
+ https://github.com/KeepSafe/aiohttp
+
+ Benchmarks
+ ----------
+
+ If you are interested in efficiency, the AsyncIO community maintains a
+ list of benchmarks on the official wiki:
+ https://github.com/python/asyncio/wiki/Benchmarks
+
+ CHANGES
+ =======
+
+ 1.0.5 (2016-10-11)
+ ------------------
+
+ - Fix StreamReader._read_nowait to return all available
+ data up to the requested amount #1297
+
+
+ 1.0.4 (2016-09-22)
+ ------------------
+
+ - Fix FlowControlStreamReader.read_nowait so that it checks
+ whether the transport is paused #1206
+
+
+ 1.0.2 (2016-09-22)
+ ------------------
+
+ - Make CookieJar compatible with 32-bit systems #1188
+
+ - Add missing `WSMsgType` to `web_ws.__all__`, see #1200
+
+ - Fix `CookieJar` ctor when called with `loop=None` #1203
+
+ - Fix broken upper-casing in wsgi support #1197
+
+
+ 1.0.1 (2016-09-16)
+ ------------------
+
+ - Restore `aiohttp.web.MsgType` alias for `aiohttp.WSMsgType` for sake
+ of backward compatibility #1178
+
+ - Tune alabaster schema.
+
+ - Use `text/html` content type for displaying index pages by static
+ file handler.
+
+ - Fix `AssertionError` in static file handling #1177
+
+ - Fix access log formats `%O` and `%b` for static file handling
+
+ - Remove `debug` setting of GunicornWorker, use `app.debug`
+ to control its debug-mode instead
+
+
+ 1.0.0 (2016-09-16)
+ -------------------
+
+ - Change default size for client session's connection pool from
+ unlimited to 20 #977
+
+ - Add IE support for cookie deletion. #994
+
+ - Remove deprecated `WebSocketResponse.wait_closed` method (BACKWARD
+ INCOMPATIBLE)
+
+ - Remove deprecated `force` parameter for `ClientResponse.close`
+ method (BACKWARD INCOMPATIBLE)
+
+ - Avoid use of mutable CIMultiDict kw param in make_mocked_request
+ #997
+
+ - Make WebSocketResponse.close a little bit faster by avoiding new
+ task creation just for timeout measurement
+
+ - Add `proxy` and `proxy_auth` params to `client.get()` and family,
+ deprecate `ProxyConnector` #998
+
+ - Add support for websocket send_json and receive_json, synchronize
+ server and client API for websockets #984
+
+ - Implement router shortcuts for the most useful HTTP methods, use
+ `app.router.add_get()`, `app.router.add_post()` etc. instead of
+ `app.router.add_route()` #986
+
+ - Support SSL connections for gunicorn worker #1003
+
+ - Move obsolete examples to legacy folder
+
+ - Switch to multidict 2.0 and title-cased strings #1015
+
+ - `{FOO}e` logger format is case-sensitive now
+
+ - Fix logger report for unix socket 8e8469b
+
+ - Rename aiohttp.websocket to aiohttp._ws_impl
+
+ - Rename aiohttp.MsgType to aiohttp.WSMsgType
+
+ - Introduce aiohttp.WSMessage officially
+
+ - Rename Message -> WSMessage
+
+ - Remove deprecated decode param from resp.read(decode=True)
+
+ - Use 5min default client timeout #1028
+
+ - Relax HTTP method validation in UrlDispatcher #1037
+
+ - Pin minimal supported asyncio version to 3.4.2+ (`loop.is_closed()`
+ should be present)
+
+ - Remove aiohttp.websocket module (BACKWARD INCOMPATIBLE)
+ Please use high-level client and server approaches
+
+ - Link header for 451 status code is mandatory
+
+ - Fix test_client fixture to allow multiple clients per test #1072
+
+ - make_mocked_request now accepts dict as headers #1073
+
+ - Add Python 3.5.2/3.6+ compatibility patch for async generator
+ protocol change #1082
+
+ - Improvement: test_client can accept an instance object #1083
+
+ - Simplify ServerHttpProtocol implementation #1060
+
+ - Add a flag for optionally showing a directory index for static file
+ handling #921
+
+ - Define `web.Application.on_startup()` signal handler #1103
+
+ - Drop ChunkedParser and LinesParser #1111
+
+ - Call `Application.startup` in GunicornWebWorker #1105
+
+ - Fix client handling of hostnames with 63 bytes when a port is given in
+ the url #1044
+
+ - Implement proxy support for ClientSession.ws_connect #1025
+
+ - Return named tuple from WebSocketResponse.can_prepare #1016
+
+ - Fix access_log_format in `GunicornWebWorker` #1117
+
+ - Set Content-Type to application/octet-stream by default #1124
+
+ - Deprecate debug parameter from app.make_handler(), use
+ `Application(debug=True)` instead #1121
+
+ - Remove fragment string in request path #846
+
+ - Use aiodns.DNSResolver.gethostbyname() if available #1136
+
+ - Fix static file sending on uvloop when sendfile is available #1093
+
+ - Make prettier urls if query is empty dict #1143
+
+ - Fix redirects for HEAD requests #1147
+
+ - Default value for `StreamReader.read_nowait` is -1 from now #1150
+
+ - `aiohttp.StreamReader` is not inherited from `asyncio.StreamReader` from now
+ (BACKWARD INCOMPATIBLE) #1150
+
+ - Streams documentation added #1150
+
+ - Add `multipart` coroutine method for web Request object #1067
+
+ - Publish ClientSession.loop property #1149
+
+ - Fix static file with spaces #1140
+
+ - Fix piling up asyncio loop by cookie expiration callbacks #1061
+
+ - Drop `Timeout` class for the sake of the `async_timeout` external library.
+ `aiohttp.Timeout` is an alias for `async_timeout.timeout`
+
+ - `use_dns_cache` parameter of `aiohttp.TCPConnector` is `True` by
+ default (BACKWARD INCOMPATIBLE) #1152
+
+ - `aiohttp.TCPConnector` uses asynchronous DNS resolver if available by
+ default (BACKWARD INCOMPATIBLE) #1152
+
+ - Conform to RFC3986 - do not include url fragments in client requests #1174
+
+ - Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) #1173
+
+ - Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) #1173
+
+ - Fix clashing cookies that have the same name but belong to different
+ domains (BACKWARD INCOMPATIBLE) #1125
+
+ - Support binary Content-Transfer-Encoding #1169
+Platform: UNKNOWN
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Topic :: Internet :: WWW/HTTP
diff --git a/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/SOURCES.txt b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/SOURCES.txt
new file mode 100644
index 0000000..029edbd
--- /dev/null
+++ b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/SOURCES.txt
@@ -0,0 +1,162 @@
+CHANGES.rst
+CONTRIBUTORS.txt
+LICENSE.txt
+MANIFEST.in
+Makefile
+README.rst
+setup.cfg
+setup.py
+aiohttp/__init__.py
+aiohttp/_websocket.c
+aiohttp/_websocket.pyx
+aiohttp/_ws_impl.py
+aiohttp/abc.py
+aiohttp/client.py
+aiohttp/client_reqrep.py
+aiohttp/client_ws.py
+aiohttp/connector.py
+aiohttp/cookiejar.py
+aiohttp/errors.py
+aiohttp/file_sender.py
+aiohttp/hdrs.py
+aiohttp/helpers.py
+aiohttp/log.py
+aiohttp/multipart.py
+aiohttp/parsers.py
+aiohttp/protocol.py
+aiohttp/pytest_plugin.py
+aiohttp/resolver.py
+aiohttp/server.py
+aiohttp/signals.py
+aiohttp/streams.py
+aiohttp/test_utils.py
+aiohttp/web.py
+aiohttp/web_exceptions.py
+aiohttp/web_reqrep.py
+aiohttp/web_urldispatcher.py
+aiohttp/web_ws.py
+aiohttp/worker.py
+aiohttp/wsgi.py
+aiohttp.egg-info/PKG-INFO
+aiohttp.egg-info/SOURCES.txt
+aiohttp.egg-info/dependency_links.txt
+aiohttp.egg-info/requires.txt
+aiohttp.egg-info/top_level.txt
+docs/Makefile
+docs/abc.rst
+docs/aiohttp-icon.ico
+docs/aiohttp-icon.svg
+docs/api.rst
+docs/changes.rst
+docs/client.rst
+docs/client_reference.rst
+docs/conf.py
+docs/contributing.rst
+docs/faq.rst
+docs/glossary.rst
+docs/gunicorn.rst
+docs/index.rst
+docs/logging.rst
+docs/make.bat
+docs/multipart.rst
+docs/new_router.rst
+docs/server.rst
+docs/spelling_wordlist.txt
+docs/streams.rst
+docs/testing.rst
+docs/tutorial.rst
+docs/web.rst
+docs/web_reference.rst
+docs/_static/aiohttp-icon-128x128.png
+docs/_static/aiohttp-icon-32x32.png
+docs/_static/aiohttp-icon-64x64.png
+docs/_static/aiohttp-icon-96x96.png
+examples/background_tasks.py
+examples/basic_srv.py
+examples/cli_app.py
+examples/client_auth.py
+examples/client_json.py
+examples/client_ws.py
+examples/curl.py
+examples/fake_server.py
+examples/server.crt
+examples/server.csr
+examples/server.key
+examples/static_files.py
+examples/web_classview1.py
+examples/web_cookies.py
+examples/web_rewrite_headers_middleware.py
+examples/web_srv.py
+examples/web_ws.py
+examples/websocket.html
+examples/legacy/crawl.py
+examples/legacy/srv.py
+examples/legacy/tcp_protocol_parser.py
+tests/conftest.py
+tests/data.unknown_mime_type
+tests/hello.txt.gz
+tests/sample.crt
+tests/sample.crt.der
+tests/sample.key
+tests/software_development_in_picture.jpg +tests/test_classbasedview.py +tests/test_client_connection.py +tests/test_client_functional.py +tests/test_client_functional_oldstyle.py +tests/test_client_request.py +tests/test_client_response.py +tests/test_client_session.py +tests/test_client_ws.py +tests/test_client_ws_functional.py +tests/test_connector.py +tests/test_cookiejar.py +tests/test_errors.py +tests/test_flowcontrol_streams.py +tests/test_helpers.py +tests/test_http_parser.py +tests/test_multipart.py +tests/test_parser_buffer.py +tests/test_protocol.py +tests/test_proxy.py +tests/test_pytest_plugin.py +tests/test_resolver.py +tests/test_run_app.py +tests/test_server.py +tests/test_signals.py +tests/test_stream_parser.py +tests/test_stream_protocol.py +tests/test_stream_writer.py +tests/test_streams.py +tests/test_test_utils.py +tests/test_urldispatch.py +tests/test_web_application.py +tests/test_web_cli.py +tests/test_web_exceptions.py +tests/test_web_functional.py +tests/test_web_middleware.py +tests/test_web_request.py +tests/test_web_request_handler.py +tests/test_web_response.py +tests/test_web_sendfile.py +tests/test_web_sendfile_functional.py +tests/test_web_urldispatcher.py +tests/test_web_websocket.py +tests/test_web_websocket_functional.py +tests/test_web_websocket_functional_oldstyle.py +tests/test_websocket_handshake.py +tests/test_websocket_parser.py +tests/test_websocket_writer.py +tests/test_worker.py +tests/test_wsgi.py +tests/autobahn/client.py +tests/autobahn/fuzzingclient.json +tests/autobahn/fuzzingserver.json +tests/autobahn/server.py +tests/test_py35/test_cbv35.py +tests/test_py35/test_client.py +tests/test_py35/test_client_websocket_35.py +tests/test_py35/test_multipart_35.py +tests/test_py35/test_resp.py +tests/test_py35/test_streams_35.py +tests/test_py35/test_test_utils_35.py +tests/test_py35/test_web_websocket_35.py \ No newline at end of file diff --git a/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/dependency_links.txt b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/installed-files.txt b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..47219fc --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/installed-files.txt @@ -0,0 +1,66 @@ +..\aiohttp\abc.py +..\aiohttp\client.py +..\aiohttp\client_reqrep.py +..\aiohttp\client_ws.py +..\aiohttp\connector.py +..\aiohttp\cookiejar.py +..\aiohttp\errors.py +..\aiohttp\file_sender.py +..\aiohttp\hdrs.py +..\aiohttp\helpers.py +..\aiohttp\log.py +..\aiohttp\multipart.py +..\aiohttp\parsers.py +..\aiohttp\protocol.py +..\aiohttp\pytest_plugin.py +..\aiohttp\resolver.py +..\aiohttp\server.py +..\aiohttp\signals.py +..\aiohttp\streams.py +..\aiohttp\test_utils.py +..\aiohttp\web.py +..\aiohttp\web_exceptions.py +..\aiohttp\web_reqrep.py +..\aiohttp\web_urldispatcher.py +..\aiohttp\web_ws.py +..\aiohttp\worker.py +..\aiohttp\wsgi.py +..\aiohttp\_ws_impl.py +..\aiohttp\__init__.py +..\aiohttp\_websocket.c +..\aiohttp\_websocket.pyx +..\aiohttp\__pycache__\abc.cpython-36.pyc +..\aiohttp\__pycache__\client.cpython-36.pyc +..\aiohttp\__pycache__\client_reqrep.cpython-36.pyc +..\aiohttp\__pycache__\client_ws.cpython-36.pyc +..\aiohttp\__pycache__\connector.cpython-36.pyc 
+..\aiohttp\__pycache__\cookiejar.cpython-36.pyc +..\aiohttp\__pycache__\errors.cpython-36.pyc +..\aiohttp\__pycache__\file_sender.cpython-36.pyc +..\aiohttp\__pycache__\hdrs.cpython-36.pyc +..\aiohttp\__pycache__\helpers.cpython-36.pyc +..\aiohttp\__pycache__\log.cpython-36.pyc +..\aiohttp\__pycache__\multipart.cpython-36.pyc +..\aiohttp\__pycache__\parsers.cpython-36.pyc +..\aiohttp\__pycache__\protocol.cpython-36.pyc +..\aiohttp\__pycache__\pytest_plugin.cpython-36.pyc +..\aiohttp\__pycache__\resolver.cpython-36.pyc +..\aiohttp\__pycache__\server.cpython-36.pyc +..\aiohttp\__pycache__\signals.cpython-36.pyc +..\aiohttp\__pycache__\streams.cpython-36.pyc +..\aiohttp\__pycache__\test_utils.cpython-36.pyc +..\aiohttp\__pycache__\web.cpython-36.pyc +..\aiohttp\__pycache__\web_exceptions.cpython-36.pyc +..\aiohttp\__pycache__\web_reqrep.cpython-36.pyc +..\aiohttp\__pycache__\web_urldispatcher.cpython-36.pyc +..\aiohttp\__pycache__\web_ws.cpython-36.pyc +..\aiohttp\__pycache__\worker.cpython-36.pyc +..\aiohttp\__pycache__\wsgi.cpython-36.pyc +..\aiohttp\__pycache__\_ws_impl.cpython-36.pyc +..\aiohttp\__pycache__\__init__.cpython-36.pyc +..\aiohttp\_websocket.cp36-win32.pyd +dependency_links.txt +PKG-INFO +requires.txt +SOURCES.txt +top_level.txt diff --git a/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/requires.txt b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..1621c30 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/requires.txt @@ -0,0 +1,3 @@ +chardet +multidict>=2.0 +async_timeout diff --git a/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/top_level.txt b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..ee4ba4f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp-1.0.5-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +aiohttp diff --git a/RBXLegacyDiscordBot/lib/aiohttp/__init__.py b/RBXLegacyDiscordBot/lib/aiohttp/__init__.py new file mode 100644 index 0000000..f709016 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/__init__.py @@ -0,0 +1,41 @@ +__version__ = '1.0.5' + +# Deprecated, keep it here for a while for backward compatibility. +import multidict # noqa + +# This relies on each of the submodules having an __all__ variable. + +from multidict import * # noqa +from . 
import hdrs # noqa +from .protocol import * # noqa +from .connector import * # noqa +from .client import * # noqa +from .client_reqrep import * # noqa +from .errors import * # noqa +from .helpers import * # noqa +from .parsers import * # noqa +from .streams import * # noqa +from .multipart import * # noqa +from .client_ws import ClientWebSocketResponse # noqa +from ._ws_impl import WSMsgType, WSCloseCode, WSMessage, WebSocketError # noqa +from .file_sender import FileSender # noqa +from .cookiejar import CookieJar # noqa +from .resolver import * # noqa + + +MsgType = WSMsgType # backward compatibility + + +__all__ = (client.__all__ + # noqa + client_reqrep.__all__ + # noqa + errors.__all__ + # noqa + helpers.__all__ + # noqa + parsers.__all__ + # noqa + protocol.__all__ + # noqa + connector.__all__ + # noqa + streams.__all__ + # noqa + multidict.__all__ + # noqa + multipart.__all__ + # noqa + ('hdrs', 'FileSender', 'WSMsgType', 'MsgType', 'WSCloseCode', + 'WebSocketError', 'WSMessage', + 'ClientWebSocketResponse', 'CookieJar')) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/_websocket.c b/RBXLegacyDiscordBot/lib/aiohttp/_websocket.c new file mode 100644 index 0000000..03cc432 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/_websocket.c @@ -0,0 +1,2414 @@ +/* Generated by Cython 0.24.1 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [] + }, + "module_name": "aiohttp._websocket" +} +END: Cython Metadata */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03020000) + #error Cython requires Python 2.6+ or Python 3.2+. +#else +#define CYTHON_ABI "0_24_1" +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 +#endif +#if !defined(CYTHON_USE_PYLONG_INTERNALS) && CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x02070000 + #define CYTHON_USE_PYLONG_INTERNALS 1 +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif 
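The generated C file added here compiles the Cython helper `_websocket_mask_cython(bytes mask, bytearray data)` from aiohttp/_websocket.pyx. As the .pyx fragments embedded in the comments further down show, it applies the WebSocket frame mask (RFC 6455): each payload byte is XORed with the corresponding byte of the repeating 4-byte mask, with 8-byte and 4-byte wide XORs used as a fast path, and the bytearray is mutated in place and returned. A rough pure-Python equivalent, for orientation only (the helper name below is illustrative, not the vendored implementation):

    def websocket_mask_py(mask: bytes, data: bytearray) -> bytearray:
        # Semantically what _websocket_mask_cython computes: XOR each payload
        # byte with mask[i % 4], mutating `data` in place and returning it.
        assert len(mask) == 4
        for i in range(len(data)):
            data[i] ^= mask[i % 4]
        return data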
+#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if PY_VERSION_HEX >= 0x030500B1 +#define __Pyx_PyAsyncMethodsStruct PyAsyncMethods +#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) +#elif CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3 +typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; +} __Pyx_PyAsyncMethodsStruct; +#define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) +#else +#define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) + +#ifndef CYTHON_INLINE + #if defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__aiohttp___websocket +#define __PYX_HAVE_API__aiohttp___websocket +#include "string.h" +#include "stdio.h" +#include "pythread.h" +#include "stdint.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#ifdef PYREX_WITHOUT_ASSERTIONS +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString 
__Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) && defined (_M_X64) + #define __Pyx_sst_abs(value) _abs64(value) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyObject_AsSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#if PY_MAJOR_VERSION < 3 +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) +{ + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#else +#define __Pyx_Py_UNICODE_strlen Py_UNICODE_strlen +#endif +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_COMPILING_IN_CPYTHON +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + 
return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ + +static PyObject *__pyx_m; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "aiohttp/_websocket.pyx", + "type.pxd", + "bool.pxd", + "complex.pxd", +}; + +/*--- Type declarations ---*/ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = 
NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* ArgTypeTest.proto */ +static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact); + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* PyIdentifierFromString.proto */ +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +/* ModuleImport.proto */ +static PyObject *__Pyx_ImportModule(const char *name); + +/* TypeImport.proto */ +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + + +/* Module declarations from 'cpython.version' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'cpython.exc' */ + +/* Module declarations from 'cpython.module' */ + +/* Module declarations from 'cpython.mem' 
*/ + +/* Module declarations from 'cpython.tuple' */ + +/* Module declarations from 'cpython.list' */ + +/* Module declarations from 'cpython.sequence' */ + +/* Module declarations from 'cpython.mapping' */ + +/* Module declarations from 'cpython.iterator' */ + +/* Module declarations from 'cpython.number' */ + +/* Module declarations from 'cpython.int' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.bool' */ +static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; + +/* Module declarations from 'cpython.long' */ + +/* Module declarations from 'cpython.float' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.complex' */ +static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; + +/* Module declarations from 'cpython.string' */ + +/* Module declarations from 'cpython.unicode' */ + +/* Module declarations from 'cpython.dict' */ + +/* Module declarations from 'cpython.instance' */ + +/* Module declarations from 'cpython.function' */ + +/* Module declarations from 'cpython.method' */ + +/* Module declarations from 'cpython.weakref' */ + +/* Module declarations from 'cpython.getargs' */ + +/* Module declarations from 'cpython.pythread' */ + +/* Module declarations from 'cpython.pystate' */ + +/* Module declarations from 'cpython.cobject' */ + +/* Module declarations from 'cpython.oldbuffer' */ + +/* Module declarations from 'cpython.set' */ + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.bytes' */ + +/* Module declarations from 'cpython.pycapsule' */ + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'libc.stdint' */ + +/* Module declarations from 'aiohttp._websocket' */ +#define __Pyx_MODULE_NAME "aiohttp._websocket" +int __pyx_module_is_main_aiohttp___websocket = 0; + +/* Implementation of 'aiohttp._websocket' */ +static PyObject *__pyx_builtin_range; +static const char __pyx_k_i[] = "i"; +static const char __pyx_k_data[] = "data"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_mask[] = "mask"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_in_buf[] = "in_buf"; +static const char __pyx_k_data_len[] = "data_len"; +static const char __pyx_k_mask_buf[] = "mask_buf"; +static const char __pyx_k_uint32_msk[] = "uint32_msk"; +static const char __pyx_k_uint64_msk[] = "uint64_msk"; +static const char __pyx_k_aiohttp__websocket[] = "aiohttp._websocket"; +static const char __pyx_k_websocket_mask_cython[] = "_websocket_mask_cython"; +static const char __pyx_k_home_travis_build_KeepSafe_aioh[] = "/home/travis/build/KeepSafe/aiohttp/aiohttp/_websocket.pyx"; +static PyObject *__pyx_n_s_aiohttp__websocket; +static PyObject *__pyx_n_s_data; +static PyObject *__pyx_n_s_data_len; +static PyObject *__pyx_kp_s_home_travis_build_KeepSafe_aioh; +static PyObject *__pyx_n_s_i; +static PyObject *__pyx_n_s_in_buf; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_mask; +static PyObject *__pyx_n_s_mask_buf; +static PyObject *__pyx_n_s_range; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_uint32_msk; +static PyObject *__pyx_n_s_uint64_msk; +static PyObject *__pyx_n_s_websocket_mask_cython; +static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_mask, PyObject *__pyx_v_data); /* proto */ +static PyObject *__pyx_tuple_; +static PyObject *__pyx_codeobj__2; + +/* 
"aiohttp/_websocket.pyx":9 + * from libc.stdint cimport uint32_t, uint64_t, uintmax_t + * + * def _websocket_mask_cython(bytes mask, bytearray data): # <<<<<<<<<<<<<< + * """Note, this function mutates it's `data` argument + * """ + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_7aiohttp_10_websocket__websocket_mask_cython[] = "Note, this function mutates it's `data` argument\n "; +static PyMethodDef __pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython = {"_websocket_mask_cython", (PyCFunction)__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython, METH_VARARGS|METH_KEYWORDS, __pyx_doc_7aiohttp_10_websocket__websocket_mask_cython}; +static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_mask = 0; + PyObject *__pyx_v_data = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_websocket_mask_cython (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_mask,&__pyx_n_s_data,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_mask)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, 1); __PYX_ERR(0, 9, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_websocket_mask_cython") < 0)) __PYX_ERR(0, 9, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_mask = ((PyObject*)values[0]); + __pyx_v_data = ((PyObject*)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_mask), (&PyBytes_Type), 1, "mask", 1))) __PYX_ERR(0, 9, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_data), (&PyByteArray_Type), 1, "data", 1))) __PYX_ERR(0, 9, __pyx_L1_error) + __pyx_r = __pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(__pyx_self, __pyx_v_mask, __pyx_v_data); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_mask, PyObject *__pyx_v_data) { + Py_ssize_t __pyx_v_data_len; + Py_ssize_t 
__pyx_v_i; + unsigned char *__pyx_v_in_buf; + unsigned char const *__pyx_v_mask_buf; + uint32_t __pyx_v_uint32_msk; + uint64_t __pyx_v_uint64_msk; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + Py_ssize_t __pyx_t_1; + char *__pyx_t_2; + int __pyx_t_3; + uint64_t *__pyx_t_4; + long __pyx_t_5; + uint32_t *__pyx_t_6; + Py_ssize_t __pyx_t_7; + Py_ssize_t __pyx_t_8; + __Pyx_RefNannySetupContext("_websocket_mask_cython", 0); + + /* "aiohttp/_websocket.pyx":20 + * uint64_t uint64_msk + * + * assert len(mask) == 4 # <<<<<<<<<<<<<< + * + * data_len = len(data) + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + if (unlikely(!Py_OptimizeFlag)) { + if (unlikely(__pyx_v_mask == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 20, __pyx_L1_error) + } + __pyx_t_1 = PyBytes_GET_SIZE(__pyx_v_mask); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 20, __pyx_L1_error) + if (unlikely(!((__pyx_t_1 == 4) != 0))) { + PyErr_SetNone(PyExc_AssertionError); + __PYX_ERR(0, 20, __pyx_L1_error) + } + } + #endif + + /* "aiohttp/_websocket.pyx":22 + * assert len(mask) == 4 + * + * data_len = len(data) # <<<<<<<<<<<<<< + * in_buf = PyByteArray_AsString(data) + * mask_buf = PyBytes_AsString(mask) + */ + __pyx_t_1 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 22, __pyx_L1_error) + __pyx_v_data_len = __pyx_t_1; + + /* "aiohttp/_websocket.pyx":23 + * + * data_len = len(data) + * in_buf = PyByteArray_AsString(data) # <<<<<<<<<<<<<< + * mask_buf = PyBytes_AsString(mask) + * uint32_msk = (mask_buf)[0] + */ + __pyx_t_2 = PyByteArray_AsString(__pyx_v_data); if (unlikely(__pyx_t_2 == NULL)) __PYX_ERR(0, 23, __pyx_L1_error) + __pyx_v_in_buf = ((unsigned char *)__pyx_t_2); + + /* "aiohttp/_websocket.pyx":24 + * data_len = len(data) + * in_buf = PyByteArray_AsString(data) + * mask_buf = PyBytes_AsString(mask) # <<<<<<<<<<<<<< + * uint32_msk = (mask_buf)[0] + * + */ + __pyx_t_2 = PyBytes_AsString(__pyx_v_mask); if (unlikely(__pyx_t_2 == NULL)) __PYX_ERR(0, 24, __pyx_L1_error) + __pyx_v_mask_buf = ((unsigned char const *)__pyx_t_2); + + /* "aiohttp/_websocket.pyx":25 + * in_buf = PyByteArray_AsString(data) + * mask_buf = PyBytes_AsString(mask) + * uint32_msk = (mask_buf)[0] # <<<<<<<<<<<<<< + * + * # TODO: align in_data ptr to achieve even faster speeds + */ + __pyx_v_uint32_msk = (((uint32_t *)__pyx_v_mask_buf)[0]); + + /* "aiohttp/_websocket.pyx":30 + * # does it need in python ?! 
malloc() always aligns to sizeof(long) bytes + * + * if sizeof(size_t) >= 8: # <<<<<<<<<<<<<< + * uint64_msk = uint32_msk + * uint64_msk = (uint64_msk << 32) | uint32_msk + */ + __pyx_t_3 = (((sizeof(size_t)) >= 8) != 0); + if (__pyx_t_3) { + + /* "aiohttp/_websocket.pyx":31 + * + * if sizeof(size_t) >= 8: + * uint64_msk = uint32_msk # <<<<<<<<<<<<<< + * uint64_msk = (uint64_msk << 32) | uint32_msk + * + */ + __pyx_v_uint64_msk = __pyx_v_uint32_msk; + + /* "aiohttp/_websocket.pyx":32 + * if sizeof(size_t) >= 8: + * uint64_msk = uint32_msk + * uint64_msk = (uint64_msk << 32) | uint32_msk # <<<<<<<<<<<<<< + * + * while data_len >= 8: + */ + __pyx_v_uint64_msk = ((__pyx_v_uint64_msk << 32) | __pyx_v_uint32_msk); + + /* "aiohttp/_websocket.pyx":34 + * uint64_msk = (uint64_msk << 32) | uint32_msk + * + * while data_len >= 8: # <<<<<<<<<<<<<< + * (in_buf)[0] ^= uint64_msk + * in_buf += 8 + */ + while (1) { + __pyx_t_3 = ((__pyx_v_data_len >= 8) != 0); + if (!__pyx_t_3) break; + + /* "aiohttp/_websocket.pyx":35 + * + * while data_len >= 8: + * (in_buf)[0] ^= uint64_msk # <<<<<<<<<<<<<< + * in_buf += 8 + * data_len -= 8 + */ + __pyx_t_4 = ((uint64_t *)__pyx_v_in_buf); + __pyx_t_5 = 0; + (__pyx_t_4[__pyx_t_5]) = ((__pyx_t_4[__pyx_t_5]) ^ __pyx_v_uint64_msk); + + /* "aiohttp/_websocket.pyx":36 + * while data_len >= 8: + * (in_buf)[0] ^= uint64_msk + * in_buf += 8 # <<<<<<<<<<<<<< + * data_len -= 8 + * + */ + __pyx_v_in_buf = (__pyx_v_in_buf + 8); + + /* "aiohttp/_websocket.pyx":37 + * (in_buf)[0] ^= uint64_msk + * in_buf += 8 + * data_len -= 8 # <<<<<<<<<<<<<< + * + * + */ + __pyx_v_data_len = (__pyx_v_data_len - 8); + } + + /* "aiohttp/_websocket.pyx":30 + * # does it need in python ?! malloc() always aligns to sizeof(long) bytes + * + * if sizeof(size_t) >= 8: # <<<<<<<<<<<<<< + * uint64_msk = uint32_msk + * uint64_msk = (uint64_msk << 32) | uint32_msk + */ + } + + /* "aiohttp/_websocket.pyx":40 + * + * + * while data_len >= 4: # <<<<<<<<<<<<<< + * (in_buf)[0] ^= uint32_msk + * in_buf += 4 + */ + while (1) { + __pyx_t_3 = ((__pyx_v_data_len >= 4) != 0); + if (!__pyx_t_3) break; + + /* "aiohttp/_websocket.pyx":41 + * + * while data_len >= 4: + * (in_buf)[0] ^= uint32_msk # <<<<<<<<<<<<<< + * in_buf += 4 + * data_len -= 4 + */ + __pyx_t_6 = ((uint32_t *)__pyx_v_in_buf); + __pyx_t_5 = 0; + (__pyx_t_6[__pyx_t_5]) = ((__pyx_t_6[__pyx_t_5]) ^ __pyx_v_uint32_msk); + + /* "aiohttp/_websocket.pyx":42 + * while data_len >= 4: + * (in_buf)[0] ^= uint32_msk + * in_buf += 4 # <<<<<<<<<<<<<< + * data_len -= 4 + * + */ + __pyx_v_in_buf = (__pyx_v_in_buf + 4); + + /* "aiohttp/_websocket.pyx":43 + * (in_buf)[0] ^= uint32_msk + * in_buf += 4 + * data_len -= 4 # <<<<<<<<<<<<<< + * + * for i in range(0, data_len): + */ + __pyx_v_data_len = (__pyx_v_data_len - 4); + } + + /* "aiohttp/_websocket.pyx":45 + * data_len -= 4 + * + * for i in range(0, data_len): # <<<<<<<<<<<<<< + * in_buf[i] ^= mask_buf[i] + * + */ + __pyx_t_1 = __pyx_v_data_len; + for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_1; __pyx_t_7+=1) { + __pyx_v_i = __pyx_t_7; + + /* "aiohttp/_websocket.pyx":46 + * + * for i in range(0, data_len): + * in_buf[i] ^= mask_buf[i] # <<<<<<<<<<<<<< + * + * return data + */ + __pyx_t_8 = __pyx_v_i; + (__pyx_v_in_buf[__pyx_t_8]) = ((__pyx_v_in_buf[__pyx_t_8]) ^ (__pyx_v_mask_buf[__pyx_v_i])); + } + + /* "aiohttp/_websocket.pyx":48 + * in_buf[i] ^= mask_buf[i] + * + * return data # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_data); + __pyx_r = __pyx_v_data; + goto __pyx_L0; + + /* 
"aiohttp/_websocket.pyx":9 + * from libc.stdint cimport uint32_t, uint64_t, uintmax_t + * + * def _websocket_mask_cython(bytes mask, bytearray data): # <<<<<<<<<<<<<< + * """Note, this function mutates it's `data` argument + * """ + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef __pyx_moduledef = { + #if PY_VERSION_HEX < 0x03020000 + { PyObject_HEAD_INIT(NULL) NULL, 0, NULL }, + #else + PyModuleDef_HEAD_INIT, + #endif + "_websocket", + 0, /* m_doc */ + -1, /* m_size */ + __pyx_methods /* m_methods */, + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_n_s_aiohttp__websocket, __pyx_k_aiohttp__websocket, sizeof(__pyx_k_aiohttp__websocket), 0, 0, 1, 1}, + {&__pyx_n_s_data, __pyx_k_data, sizeof(__pyx_k_data), 0, 0, 1, 1}, + {&__pyx_n_s_data_len, __pyx_k_data_len, sizeof(__pyx_k_data_len), 0, 0, 1, 1}, + {&__pyx_kp_s_home_travis_build_KeepSafe_aioh, __pyx_k_home_travis_build_KeepSafe_aioh, sizeof(__pyx_k_home_travis_build_KeepSafe_aioh), 0, 0, 1, 0}, + {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, + {&__pyx_n_s_in_buf, __pyx_k_in_buf, sizeof(__pyx_k_in_buf), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_mask, __pyx_k_mask, sizeof(__pyx_k_mask), 0, 0, 1, 1}, + {&__pyx_n_s_mask_buf, __pyx_k_mask_buf, sizeof(__pyx_k_mask_buf), 0, 0, 1, 1}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_uint32_msk, __pyx_k_uint32_msk, sizeof(__pyx_k_uint32_msk), 0, 0, 1, 1}, + {&__pyx_n_s_uint64_msk, __pyx_k_uint64_msk, sizeof(__pyx_k_uint64_msk), 0, 0, 1, 1}, + {&__pyx_n_s_websocket_mask_cython, __pyx_k_websocket_mask_cython, sizeof(__pyx_k_websocket_mask_cython), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 45, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "aiohttp/_websocket.pyx":9 + * from libc.stdint cimport uint32_t, uint64_t, uintmax_t + * + * def _websocket_mask_cython(bytes mask, bytearray data): # <<<<<<<<<<<<<< + * """Note, this function mutates it's `data` argument + * """ + */ + __pyx_tuple_ = PyTuple_Pack(8, __pyx_n_s_mask, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_i, __pyx_n_s_in_buf, __pyx_n_s_mask_buf, __pyx_n_s_uint32_msk, __pyx_n_s_uint64_msk); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 8, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_home_travis_build_KeepSafe_aioh, __pyx_n_s_websocket_mask_cython, 9, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + 
__Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + return 0; + __pyx_L1_error:; + return -1; +} + +#if PY_MAJOR_VERSION < 3 +PyMODINIT_FUNC init_websocket(void); /*proto*/ +PyMODINIT_FUNC init_websocket(void) +#else +PyMODINIT_FUNC PyInit__websocket(void); /*proto*/ +PyMODINIT_FUNC PyInit__websocket(void) +#endif +{ + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_REFNANNY + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); + if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); + } + #endif + __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit__websocket(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_websocket", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_aiohttp___websocket) { + if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "aiohttp._websocket")) { + if (unlikely(PyDict_SetItemString(modules, "aiohttp._websocket", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global init code ---*/ + /*--- Variable export code ---*/ + /*--- Function export code ---*/ + /*--- Type init code ---*/ + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if CYTHON_COMPILING_IN_PYPY + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) __PYX_ERR(1, 9, __pyx_L1_error) + __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), 0); if (unlikely(!__pyx_ptype_7cpython_4bool_bool)) __PYX_ERR(2, 8, __pyx_L1_error) + __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), 0); if (unlikely(!__pyx_ptype_7cpython_7complex_complex)) __PYX_ERR(3, 15, __pyx_L1_error) + /*--- Variable import code ---*/ + /*--- Function import code ---*/ + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "aiohttp/_websocket.pyx":9 + * from libc.stdint cimport uint32_t, uint64_t, uintmax_t + * + * def _websocket_mask_cython(bytes mask, bytearray data): # <<<<<<<<<<<<<< + * """Note, this function mutates it's `data` argument + * """ + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython, NULL, __pyx_n_s_aiohttp__websocket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_websocket_mask_cython, __pyx_t_1) < 0) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "aiohttp/_websocket.pyx":1 + * from cpython cimport PyBytes_AsString # <<<<<<<<<<<<<< + * + * #from cpython cimport PyByteArray_AsString # cython still not exports that + */ + __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init aiohttp._websocket", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init aiohttp._websocket"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if PY_MAJOR_VERSION < 3 + return; + #else + 
return __pyx_m; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* ArgTypeTest */ +static void __Pyx_RaiseArgumentTypeInvalid(const char* name, PyObject *obj, PyTypeObject *type) { + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); +} +static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (none_allowed && obj == Py_None) return 1; + else if (exact) { + if (likely(Py_TYPE(obj) == type)) return 1; + #if PY_MAJOR_VERSION == 2 + else if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(PyObject_TypeCheck(obj, type))) return 1; + } + __Pyx_RaiseArgumentTypeInvalid(name, obj, type); + return 0; +} + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = 
(__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + py_code = __pyx_find_code_object(c_line ? c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
c_line : py_line, py_code); + } + py_frame = PyFrame_New( + PyThreadState_GET(), /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + py_frame->f_lineno = py_line; + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) 
(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << 
PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | 
(unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << 
PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CheckBinaryVersion */ +static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* ModuleImport */ +#ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject 
*py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s has the wrong size, try recompiling. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +/* InitStrings */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if CYTHON_COMPILING_IN_CPYTHON && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { +#if PY_VERSION_HEX < 0x03030000 + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +#else + if (__Pyx_PyUnicode_READY(o) == -1) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (PyUnicode_IS_ASCII(o)) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + 
PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +#endif + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { + PyNumberMethods *m; + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (PyInt_Check(x) || PyLong_Check(x)) +#else + if (PyLong_Check(x)) +#endif + return __Pyx_NewRef(x); + m = Py_TYPE(x)->tp_as_number; +#if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = PyNumber_Long(x); + } +#else + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Long(x); + } +#endif + if (res) { +#if PY_MAJOR_VERSION < 3 + if (!PyInt_Check(res) && !PyLong_Check(res)) { +#else + if (!PyLong_Check(res)) { +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + name, name, Py_TYPE(res)->tp_name); + Py_DECREF(res); + return NULL; + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(x); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/RBXLegacyDiscordBot/lib/aiohttp/_websocket.cp36-win32.pyd b/RBXLegacyDiscordBot/lib/aiohttp/_websocket.cp36-win32.pyd new file mode 100644 index 0000000000000000000000000000000000000000..b8777271e027136d94dd82eed64b911758d7afbd GIT binary patch literal 18432 zcmeHue|(eGz5i)R+W;wzvz@4@fu#c!YSOg%(Illn3uP2Y=`V^v2u(^-+O#H5u;mMg zOS=&ss$7T8d*}Sxi{7zKC)=E&a~cTJy5V3Oa@mB9ZEOb1Tm{5#>wNC}bDpFPpltW` zy?=b~YfE3}JkR-@&*yVKzs~2J^E^+NtlY)MF~$@qlEm14T+4tQG{z>~@$w}0 z{P;Jg?oX?Fb84;E7u0kF+SdnMZJH*R-`_51)_F96kYD5TYs#0^XxiG{o`US`3EBkf z&wM%iq4k%WP9=-~!J$(gv{&t2cbcv*kKdbUuRYz&;mxPp_;vm1R$M;-o*UQom)D&# z;ySc<1IHWM+r+OQ?0x(c(GBf&@%D0Glb6Q&B+VQZj8&y&vR~dJG$wJ*u=J@D(RgF@{+iHHF&R8aIj$PR<0LZxbrL!_v3`9gXdX<||jJp<4%>u^U)ZM|@ zFkP?yZDXtz{5s|_Hs$KRl=d$4m!-6+L8E`tuR!p03h4aJR8%D2G?awgxZ^(!L@Wrn zU4o0T0+@pjK)4B;a3mRARv@FX6%zq)peRv{C@H@(#-au5n|yV7Jz6g@dUtgwij> zE}W|w$5@?sUaU}s<4iDw<7tA1svD@vFht7+b@9Kzu!9-#w{XdMAv$=66jDneWqdO1 z);OpRZ&9aBQn4-m#Mu~sNx$Ch>eXs(a_BJ8K?;+8gC`_mQ(LS-YXA85m@*(Wa+OM z`ACwacsT&wZ|C$>QKJ!z#i4L16K=1M7s#pzH%cK@OFHZz8pR*SpM`AF2`R(Tz|=5W z{5`Z|J669Vg?=0V0#U?TRNl2@kq6P!M`%6K`xS=h;Ihg+)QXRXgxf35ju|7J8{@a6 z*~{n)=W`1s1-umkwlH!-Ik9gvywM%adE|h2AReTl_5 z_=}_90gTAOVJR&%t><#+_8JFq*#~${J>BgDp?tR%R6UZA5thb!iQ7|6j(h^QAPqNr96fk?|21^ zN_k4$I;1=mE~{zK2hMBZgO;1(hhg-dsF2Z;-uULsJu6l)btJo%>O2S4w^dPHTuHUP z9Ch=~ybgZJcJs?^W&Dz12Y{ZLMh2%#hn6PaBb{jA677 zoMB=%lDwMQUV-M*uVE$ecU*O`LRl|X+Qtp!yf`cy=#cBSsfD(4*n_L4bNfXjw_7D+R;_dLJ%E_ zL5200fH@}qa46^7lYlc+7+n+Fm$oJrU&e`qtoUAp+Ti546>UfSUerhe0`ZEsps_D4 zhLm&^OuM6v$Xu~_C!okq9HUjf&x$wUX7CR{=JP*ZD^tR;@plk171P9m4C;$iT#8f-M=D01e){Rxh+YPO1h9Q+fI8PuAhcw}^ASErT9n==Df#TCEQVaa zf5}S5nl11VQ-@af*>HJaKQ(A07OHw_BGP`dWjnmC4wWL5UZTFc4mhggeaH=z(1q#Y zPA%I!H8Ioor48O8R-6}^;f)8(U>nWZjYLTKAc0UAjjzVwG%MqG0s$Oywrzol#~Y$A zQCxjdw%#^78Jr23+qS4!2%c5OR|B1{b+|S*Cala&B*GkIp75`+AS!=05FVC7S@EgB z8=NRxn&)2F_15Tz$39b`!oGjJ*e*aaY!W|$i87AV+YQtCk{mT{QmGK5k9YF7xGXCx-qu@Nx)K}BXZr=IGQ~c8V7{7c~;Fq2betEi;U!L{i^7unNkK-a%C?Y+( 
zannmTPvWMZOF%hPEE{QYM0$4d-o5pX_$5dM0?5)|u{RIf(f*2OXx|d4cwWkz1+&!m zu8=-_ySHL5CXKAgEpNkTAI0Qlq_<)io?<{mO3BW7A@bIm=3Y9mytu0+kg8db_KgF}zd&%5Ck zrglrkKw)CJRvl^8s(VrHI#W~Mz~d2-Wq92Y@x-OjuvjsYm{p`t#gKas(Orlk&fEwc z{2HB*1(H`^Ah${(!lEsQfBy5(mW9vHs4=`ffBbD~Ahb+~ zKL`}ckZpwO8DfTLIGeiC4R6#pL~bLR-Yq9`g|(<1z4Ff#$zGMpt=K#DlVhqogY=QkI& zWUVl~A&=U-`ebBWB&diiRz_+d@~N-W=_XP!1oH~x!$;F)cFr|lXOxiIWX%=*8KgBZ z8Q@GQ)X_yPEH%85lXi^Ka2@<}NnA1bBl z4?%@bg$Fb;9GQ!8m?6po4>LSwh*~mofkw+@?1+@FMx09dW?tKPUBv5hURUzEiq{Ta z*YbJ=uUGMUEwA0Y_M*lDYly~pASSA?{a788uBoeU5H^W+Gvf5sGk}Pwg&YuV;0 zecLf)og`>;z2&v=oKDI+lQ73RaiLn=sXYsH>w%5NY6qcB=`Gi4z59Xc9Y6;XlMbV| zFo263wDFz6g_+S4?VKj|588x3S3CORi7}>lmGIKo7?VlOn;=WW;P`7q#RhJtmLAE# zzV{f8#Mi$sNe>(%!^Y~Xr95uTA#Rya`kGh+?2xqHu*_>I)J|AW?02rY!asDN8S@V@ z#R-5kfgZ}1%$9LpEmUk#IB;akQv&Yr&H@0v8V=l1uALP=q%A4eDnirHn1H*?&}3{+ zAvCdrz%LAuz%)_}bP9)8CvyNVuY5NpVZ#$GX~38Y6~F3vTQK%9f<%ScV%{)vfjCeTA54eK2Vg8~Hi5Q$isM*T z4JblcV$pzODl+pD_Z@l>y$LK|Ta2y9W-6A#k~ye^e))h^GS# z3n06wt%tYVX=#lqdUIo;sp5o4NjOc)>FEWTI9E}fwEG(8YR*N>Cid3%L;Bu(%X$?b z0F60d(w&fr5{?p)Rm<1@VnUS1jBuT-LtF0@-|Nl2cvn2qUd+tBe9s1oO$F3;-@zt86>2TpCh7itnZv;t-7e#nhDPwyeHOkd8H#F$c@J4lHv(_f% zo#mn+Pvmsp3(>{frqC>&JsI^b(Ig%fYsqfHV)1bEE{j^256!{{5SyV3koLo}TJTcm zbiW5uTAC=5#Vk>2{b5e`uh13l%4JAEbJgK9Yi(;oW#YZc!P2mW2?xP<#slX_W~h*R z;7{oh#g2170!cMiwk(>x7+6$}l4cO;IPqRhcvCJ5O;`@BT5T@j%~S6!6UOh=;1n>A zQ08f@L!r-#SX9EI!3#6W0gIg>$%{mxj`1#;>?AP97IPIa zXdgsKsgoJ^oF;|+7?F$$1IhEn>juk~bnhx$s=eMT_)i(*S}kT*Atlo4cZ`zJTtx#C zS|!#zg>-B%Q{E9JydU~>-)_W0gLl>y+*D5S@S5a;Qy+i#LrLn1c73L=8}Yw~Rzg0X z_Z0EWOH1rCc*Js!9&pnpXM|54#U5HPVBG7FJxgRya5C>JkX6c~%_%&yhgO{H(|(I+ z*U7X$;Iy>8C0g^}q~s*iBJ9o3KwV;xB_ zAje*x!BJwc7!1T>H3W2cHxr9ul14g#Q}*@!_%ou;26cQRs_WyDyU+a^YjfCs1omfw zT`E3>8}H8l&WOJtE?)r`Den}N)JS1F*tiTGR;aq zhT@O!jHSHCMm=-u*c{tW#PamYq-TH~*X$dmybeAt2Gyj{LBCX`ClT>djCd9jUpwOc zS5e9%szJmpPlMhtdL-}X;8^S< zai~n$L-uD7F@(eem^HC$wBmUp_4supZpD!X;Wcec)Tv>ysQ3N8N%akVSC0cnn9VNS+Pd*utq~OMC&G2N$I7_*=#&y{81G!?8TM z^H*Z;pFXqM>Orp9uJH`DM|0WYEGfD)rbR_&>2+MT#9X{N**FYpgbVG_FFa zRvR0i9@3s2uI_uBqU(p)9pgC$c8sc=p4}i#>|hP@%C3gLlRI3l9!+JwAm)w^6GY}b z_cog1TqQl!zJ1J(`^T_Cg!MgOg@+YFLEkeNoEWR%m;__R)r>odezVRoJ^RO}A7E|1m9eK^iCGnGt`0mT(c?$>5kWnfCLB%LXZe6DT(6 zlY1~7`Nq0dtHl<#w?#?ky1V*?DfH@HjVh<-Ju=oNHKB_E6;a~Z$R)A{o#ao7d7X5B zfNY4j_Y;ZmP8`K%49=H|X{x{^v!tK$y%G`AUj8e@lG81gifK{-e^wokXhX^6k^~za zs}|!S)Pu)?U0scR?i5G0_-5RNOo1IBCU;`G6_e$+VplvFo95hePXnC?T1#5$X3x8m z_Mxd8KG_p`qPBxt8@^we-rKGeTvAA8a0U4*A5T6H{p+r$mpP-l;bD|QND%p1j>Ub0?JyHr6^_;Ey|}+GEpwz?bvTnegT;d;YvUA6nQz6 zTYq0ouiNQ0e48t{f%8CYI#5{!m9P2{OjQI#YDkB68Zg4hr3Ep;a;SG*O)K~tJ z-_~UVY{TyJ(ma$IGR~?iQU6;ldY3dbPd?j}A^NW%rkxdjc)i6I&abVaUi#LJfA^+s z<9f3)ys-mbfsd@An@$8%f> z<9M}KzzBn}6<5MI-l{8LM1?p-O-dGUS!y|62VmRM084zQ2Ybl!^R_nFnfNY`Y)-b9 z@wA&ZZE~-JdQ4Z5#21>hi{|oi@Uk(%gJ3K9IObGXt&E2^jbnJVG9EtsV%tbYKCl+i zhr9sTT7r3GVd$Gj2eoCp7vh$607mWd)_E4+ia&g?0iarpd=CQ;z z+4kKa$6vC2{Hp%5*Xe)hQ%rplh4?1vPo~hn^=YhSDAZ5C(}{0YFTk#F{b-4!eI8|q z%4d;BVZYQFThP>DFwVZu=Qk80F}RtdYmwiFIX*h2fqjY75FKOxfqaWo3-<#DpgKuw z0$}@7@a{yuF9K{J1*Ss&F9+=7e*av9{@>Q1FOPF{)%kr*__W%&2(PyT{sn>dwi+Se z^J7XI(;Z#QYMeoT2iih28$!F*8}PW?HG)g^P`Nz?;$yh;;XT2FeAF_=d9K?nbE40zlm4ph>VUnSDICi^6ef5B4V?KD zFyvsiJ1IX5Pc~xgUy=~sFD6@%GS!AVu7cyS03dR#jK$zwzq^KP+&Lz#Y!23#BH8@* zwhkAr_H(C<7%akyGt>~`~@L@lYp2Y1rV~T1l3XmtxT0W8U+r7 z9%H{l+qtYm@U=mf1+A|2>_-W{OJuezEx|(;PUdoZ0#?%%gk5CVAjT&N=LUnG0LdVO z*+VJXjA_r<#<8g7c9@>9ksNEXk9K`5ZPIcbd{<>K7PwnmvA@o!^Lt%>cdN$@F+5PI z-M_%siiywI{0zA86T8Q!!-4Z@YWf97puNe11<{pJ>j|{^{D`O|h_#Jd6ihVQ0}Bvr zp#VBPL6EW8it3Oj(B<$1nkl~hO`iGfA<8;z!nNENx!I%?sGN5IWG4tJ8@1Ng=9wFW 
z0v@_!H>Q>Qg06LF@g!B{TNiKzy5_@2LAH1ttx!H!D_mGpgO-7TUxtlNrk>hCv!D3@bU>zy^UGM?wuYS5}iSh*V)XS;v^An zX5iy*_N|8{oqiY6#(hp#VEsluB5{$GN_Kht8-0Oxe;XMb^O=v6z(Atw#40V2iRfP? zoB_{zDB%e>6Ftp|nZdNAi>E1sB!30b9gqdN2o47$b`x6shPgU?v)h8SA()N++4!#= zOcu}VR{d=K?1ai6w|xH-_P^Ky-@f^^a{9a!+e^NWP3|9)Dkaa7W1GT)ChGebI6qs` z3@hm9#0>!5MkR%u)SQ7^mD`HO81;unCUpg3r&-Ax<2l<(7P|c4GEp z%f-+5*Xr$ePh9&HO#rTwVG#@k2C&5WN&Gh`b=^Ff(AN^~e+tZA% zD!H#+0xaXATnae_{F@(cJ)N#5p|y*VviSvtI$dzaOpV~$;0bE-3(TlCxiuZ_LFB%6 zzpGV)bQq$9mtUX{YGzR9df!HmeywNP*A-@B z6n|LZX+hCI8`{llpf~0j{v#CoVX~hd5i(ulZii2eA*( zG-0J)?{UupK^Okb)a`4+%BER|9Zi7RG%^gd&e!S_x(YNENjD?F5`NeOWa+8}jjU6XoHne{MWFCK%;v72XhC-B0b~c!<9n&%SO>l@u!$*n^tNyWw5c$9JzEC6n^N$Sx2abF&kH;5 zX6kVOs_;B^X}jQopXq;?lY1y+4LT$TH9lc_P_vd+iM6AtW`YJk(2|~H<5b@NTkh$B z*+LXN(q<}@2`K;k{i9kSjZKE?DBG^IKpK6u%5)zC$G;8Y^n_=p9rVoL+MjJsXIpDB zmJqphwcZuAM1XlJFEMvE?u9aU#0KThxPxW-=p8JzpPgl9xVKz zaK53+&|p|&*lOrD>@e&yJZgB{u*dKX!?z9l4X+sv8{Ra$WjJm4o#B1MpYd64Mo~`D z%|$bdEJcfomKVKPw5j-;#eXi&HfoHsj5gyEW6-$W_>l1t1M5ZWh;_Uz*LIstXS3Pn*(z;IZMC+Q zwzalqTdOT#>$GjPb=!8>{@wPR?RneFwy5oowuSZvdzbw=`+56i`$zVvrL#+IrH;~- zrRz$cDt)o^ozf3W6?1atF}d!TztAerh^tI%7I#de3yp^pQzv&NJU)o@SnIHkhsE za`Pf{jk(dh9{v!_o6SAu9p>HUe=~pG{G9oF=9qcF{8RHu^B>KZ%paK*mK!ZoEK@Dh zEyb1+OS$E9mPU));UQ1bnffs&t;oG3X{^81nxN=8c3 zt!itY^$u&kwb*L4&b2PFF1N0>ZnE}RcUX5@{|)i*to6Ir*Q|%EN3AEVgVsydbeqa{ zlTB}1W^1vv+d{VcZ4cWXvpr#Z%JvQ0KHCenS8RuDM{Pg1{m%BT?V{~ZwjBFS_Nn$e z?K-=~zTCdj-eKQr@3!x>KWcx{{q9g>tNV`bMcr$<*L6S9ozR`q#dR6_N%|Z0x9YWeoxWVZ zNWTd(E{vrJ%`X}@U^}o>5yP8?xTc#KEo%(M5cKuGo;`90=g~tj{7Op7n zEZ$xGSn>14`|0hcLw+y!3|hyEj~AaUjw9kmid9B6=C9URWHcKqja7)awZ;ykVC=?R le$4o|@fqVjW7K%ac-(l>ID(i{n#xU;Ch+^`@Bcds{1*^g(L4YE literal 0 HcmV?d00001 diff --git a/RBXLegacyDiscordBot/lib/aiohttp/_websocket.pyx b/RBXLegacyDiscordBot/lib/aiohttp/_websocket.pyx new file mode 100644 index 0000000..4fc3397 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/_websocket.pyx @@ -0,0 +1,48 @@ +from cpython cimport PyBytes_AsString + +#from cpython cimport PyByteArray_AsString # cython still not exports that +cdef extern from "Python.h": + char* PyByteArray_AsString(bytearray ba) except NULL + +from libc.stdint cimport uint32_t, uint64_t, uintmax_t + +def _websocket_mask_cython(bytes mask, bytearray data): + """Note, this function mutates it's `data` argument + """ + cdef: + Py_ssize_t data_len, i + # bit operations on signed integers are implementation-specific + unsigned char * in_buf + const unsigned char * mask_buf + uint32_t uint32_msk + uint64_t uint64_msk + + assert len(mask) == 4 + + data_len = len(data) + in_buf = PyByteArray_AsString(data) + mask_buf = PyBytes_AsString(mask) + uint32_msk = (mask_buf)[0] + + # TODO: align in_data ptr to achieve even faster speeds + # does it need in python ?! 
malloc() always aligns to sizeof(long) bytes + + if sizeof(size_t) >= 8: + uint64_msk = uint32_msk + uint64_msk = (uint64_msk << 32) | uint32_msk + + while data_len >= 8: + (in_buf)[0] ^= uint64_msk + in_buf += 8 + data_len -= 8 + + + while data_len >= 4: + (in_buf)[0] ^= uint32_msk + in_buf += 4 + data_len -= 4 + + for i in range(0, data_len): + in_buf[i] ^= mask_buf[i] + + return data diff --git a/RBXLegacyDiscordBot/lib/aiohttp/_ws_impl.py b/RBXLegacyDiscordBot/lib/aiohttp/_ws_impl.py new file mode 100644 index 0000000..27ed569 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/_ws_impl.py @@ -0,0 +1,438 @@ +"""WebSocket protocol versions 13 and 8.""" + +import base64 +import binascii +import collections +import hashlib +import json +import os +import random +import sys +from enum import IntEnum +from struct import Struct + +from aiohttp import errors, hdrs +from aiohttp.log import ws_logger + +__all__ = ('WebSocketParser', 'WebSocketWriter', 'do_handshake', + 'WSMessage', 'WebSocketError', 'WSMsgType', 'WSCloseCode') + + +class WSCloseCode(IntEnum): + OK = 1000 + GOING_AWAY = 1001 + PROTOCOL_ERROR = 1002 + UNSUPPORTED_DATA = 1003 + INVALID_TEXT = 1007 + POLICY_VIOLATION = 1008 + MESSAGE_TOO_BIG = 1009 + MANDATORY_EXTENSION = 1010 + INTERNAL_ERROR = 1011 + SERVICE_RESTART = 1012 + TRY_AGAIN_LATER = 1013 + + +ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode} + + +class WSMsgType(IntEnum): + CONTINUATION = 0x0 + TEXT = 0x1 + BINARY = 0x2 + PING = 0x9 + PONG = 0xa + CLOSE = 0x8 + CLOSED = 0x101 + ERROR = 0x102 + + text = TEXT + binary = BINARY + ping = PING + pong = PONG + close = CLOSE + closed = CLOSED + error = ERROR + + +WS_KEY = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11' + + +UNPACK_LEN2 = Struct('!H').unpack_from +UNPACK_LEN3 = Struct('!Q').unpack_from +UNPACK_CLOSE_CODE = Struct('!H').unpack +PACK_LEN1 = Struct('!BB').pack +PACK_LEN2 = Struct('!BBH').pack +PACK_LEN3 = Struct('!BBQ').pack +PACK_CLOSE_CODE = Struct('!H').pack +MSG_SIZE = 2 ** 14 + + +_WSMessageBase = collections.namedtuple('_WSMessageBase', + ['type', 'data', 'extra']) + + +class WSMessage(_WSMessageBase): + def json(self, *, loads=json.loads): + """Return parsed JSON data. + + .. 
versionadded:: 0.22 + """ + return loads(self.data) + + @property + def tp(self): + return self.type + + +CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) + + +class WebSocketError(Exception): + """WebSocket protocol parser error.""" + + def __init__(self, code, message): + self.code = code + super().__init__(message) + + +def WebSocketParser(out, buf): + while True: + fin, opcode, payload = yield from parse_frame(buf) + + if opcode == WSMsgType.CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Invalid close code: {}'.format(close_code)) + try: + close_message = payload[2:].decode('utf-8') + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, + 'Invalid UTF-8 text message') from exc + msg = WSMessage(WSMsgType.CLOSE, close_code, close_message) + elif payload: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Invalid close frame: {} {} {!r}'.format( + fin, opcode, payload)) + else: + msg = WSMessage(WSMsgType.CLOSE, 0, '') + + out.feed_data(msg, 0) + + elif opcode == WSMsgType.PING: + out.feed_data(WSMessage(WSMsgType.PING, payload, ''), len(payload)) + + elif opcode == WSMsgType.PONG: + out.feed_data(WSMessage(WSMsgType.PONG, payload, ''), len(payload)) + + elif opcode not in (WSMsgType.TEXT, WSMsgType.BINARY): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Unexpected opcode={!r}".format(opcode)) + else: + # load text/binary + data = [payload] + + while not fin: + fin, _opcode, payload = yield from parse_frame(buf, True) + + # We can receive ping/close in the middle of + # text message, Case 5.* + if _opcode == WSMsgType.PING: + out.feed_data( + WSMessage(WSMsgType.PING, payload, ''), len(payload)) + fin, _opcode, payload = yield from parse_frame(buf, True) + elif _opcode == WSMsgType.CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if (close_code not in ALLOWED_CLOSE_CODES and + close_code < 3000): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Invalid close code: {}'.format(close_code)) + try: + close_message = payload[2:].decode('utf-8') + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, + 'Invalid UTF-8 text message') from exc + msg = WSMessage(WSMsgType.CLOSE, close_code, + close_message) + elif payload: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Invalid close frame: {} {} {!r}'.format( + fin, opcode, payload)) + else: + msg = WSMessage(WSMsgType.CLOSE, 0, '') + + out.feed_data(msg, 0) + fin, _opcode, payload = yield from parse_frame(buf, True) + + if _opcode != WSMsgType.CONTINUATION: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'The opcode in non-fin frame is expected ' + 'to be zero, got {!r}'.format(_opcode)) + else: + data.append(payload) + + if opcode == WSMsgType.TEXT: + try: + text = b''.join(data).decode('utf-8') + out.feed_data(WSMessage(WSMsgType.TEXT, text, ''), + len(text)) + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, + 'Invalid UTF-8 text message') from exc + else: + data = b''.join(data) + out.feed_data( + WSMessage(WSMsgType.BINARY, data, ''), len(data)) + + +native_byteorder = sys.byteorder + + +def _websocket_mask_python(mask, data): + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytes` object + of any length. 
Returns a `bytes` object of the same length as + `data` with the mask applied as specified in section 5.3 of RFC + 6455. + + This pure-python implementation may be replaced by an optimized + version when available. + + """ + assert isinstance(data, bytearray), data + assert len(mask) == 4, mask + datalen = len(data) + if datalen == 0: + # everything work without this, but may be changed later in Python. + return bytearray() + data = int.from_bytes(data, native_byteorder) + mask = int.from_bytes(mask * (datalen // 4) + mask[: datalen % 4], + native_byteorder) + return (data ^ mask).to_bytes(datalen, native_byteorder) + + +if bool(os.environ.get('AIOHTTP_NO_EXTENSIONS')): + _websocket_mask = _websocket_mask_python +else: + try: + from ._websocket import _websocket_mask_cython + _websocket_mask = _websocket_mask_cython + except ImportError: # pragma: no cover + _websocket_mask = _websocket_mask_python + + +def parse_frame(buf, continuation=False): + """Return the next frame from the socket.""" + # read header + data = yield from buf.read(2) + first_byte, second_byte = data + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xf + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; 1 bit, MUST be 0 unless negotiated otherwise + if rsv1 or rsv2 or rsv3: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Received frame with non-zero reserved bits') + + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Received fragmented control frame') + + if fin == 0 and opcode == WSMsgType.CONTINUATION and not continuation: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + 'Received new fragment frame with non-zero ' + 'opcode {!r}'.format(opcode)) + + has_mask = (second_byte >> 7) & 1 + length = (second_byte) & 0x7f + + # Control frames MUST have a payload length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be larger than 125 bytes") + + # read payload + if length == 126: + data = yield from buf.read(2) + length = UNPACK_LEN2(data)[0] + elif length > 126: + data = yield from buf.read(8) + length = UNPACK_LEN3(data)[0] + + if has_mask: + mask = yield from buf.read(4) + + if length: + payload = yield from buf.read(length) + else: + payload = bytearray() + + if has_mask: + payload = _websocket_mask(bytes(mask), payload) + + return fin, opcode, payload + + +class WebSocketWriter: + + def __init__(self, writer, *, use_mask=False, random=random.Random()): + self.writer = writer + self.use_mask = use_mask + self.randrange = random.randrange + + def _send_frame(self, message, opcode): + """Send a frame over the websocket with message as its payload.""" + msg_length = len(message) + + use_mask = self.use_mask + if use_mask: + mask_bit = 0x80 + else: + mask_bit = 0 + + if msg_length < 126: + header = PACK_LEN1(0x80 | opcode, msg_length | mask_bit) + elif msg_length < (1 << 16): + header = PACK_LEN2(0x80 | opcode, 126 | mask_bit, msg_length) + else: + header = PACK_LEN3(0x80 | opcode, 127 | mask_bit, msg_length) + if use_mask: + mask = self.randrange(0, 0xffffffff) + mask = mask.to_bytes(4, 'big') + message = _websocket_mask(mask, bytearray(message)) + self.writer.write(header + 
mask + message) + else: + if len(message) > MSG_SIZE: + self.writer.write(header) + self.writer.write(message) + else: + self.writer.write(header + message) + + def pong(self, message=b''): + """Send pong message.""" + if isinstance(message, str): + message = message.encode('utf-8') + self._send_frame(message, WSMsgType.PONG) + + def ping(self, message=b''): + """Send ping message.""" + if isinstance(message, str): + message = message.encode('utf-8') + self._send_frame(message, WSMsgType.PING) + + def send(self, message, binary=False): + """Send a frame over the websocket with message as its payload.""" + if isinstance(message, str): + message = message.encode('utf-8') + if binary: + self._send_frame(message, WSMsgType.BINARY) + else: + self._send_frame(message, WSMsgType.TEXT) + + def close(self, code=1000, message=b''): + """Close the websocket, sending the specified code and message.""" + if isinstance(message, str): + message = message.encode('utf-8') + self._send_frame( + PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE) + + +def do_handshake(method, headers, transport, protocols=()): + """Prepare WebSocket handshake. + + It return HTTP response code, response headers, websocket parser, + websocket writer. It does not perform any IO. + + `protocols` is a sequence of known protocols. On successful handshake, + the returned response headers contain the first protocol in this list + which the server also knows. + + """ + # WebSocket accepts only GET + if method.upper() != hdrs.METH_GET: + raise errors.HttpProcessingError( + code=405, headers=((hdrs.ALLOW, hdrs.METH_GET),)) + + if 'websocket' != headers.get(hdrs.UPGRADE, '').lower().strip(): + raise errors.HttpBadRequest( + message='No WebSocket UPGRADE hdr: {}\n Can ' + '"Upgrade" only to "WebSocket".'.format(headers.get(hdrs.UPGRADE))) + + if 'upgrade' not in headers.get(hdrs.CONNECTION, '').lower(): + raise errors.HttpBadRequest( + message='No CONNECTION upgrade hdr: {}'.format( + headers.get(hdrs.CONNECTION))) + + # find common sub-protocol between client and server + protocol = None + if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: + req_protocols = [str(proto.strip()) for proto in + headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')] + + for proto in req_protocols: + if proto in protocols: + protocol = proto + break + else: + # No overlap found: Return no protocol as per spec + ws_logger.warning( + 'Client protocols %r don’t overlap server-known ones %r', + req_protocols, protocols) + + # check supported version + version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, '') + if version not in ('13', '8', '7'): + raise errors.HttpBadRequest( + message='Unsupported version: {}'.format(version), + headers=((hdrs.SEC_WEBSOCKET_VERSION, '13'),)) + + # check client handshake for validity + key = headers.get(hdrs.SEC_WEBSOCKET_KEY) + try: + if not key or len(base64.b64decode(key)) != 16: + raise errors.HttpBadRequest( + message='Handshake error: {!r}'.format(key)) + except binascii.Error: + raise errors.HttpBadRequest( + message='Handshake error: {!r}'.format(key)) from None + + response_headers = [ + (hdrs.UPGRADE, 'websocket'), + (hdrs.CONNECTION, 'upgrade'), + (hdrs.TRANSFER_ENCODING, 'chunked'), + (hdrs.SEC_WEBSOCKET_ACCEPT, base64.b64encode( + hashlib.sha1(key.encode() + WS_KEY).digest()).decode())] + + if protocol: + response_headers.append((hdrs.SEC_WEBSOCKET_PROTOCOL, protocol)) + + # response code, headers, parser, writer, protocol + return (101, + response_headers, + WebSocketParser, + WebSocketWriter(transport), + protocol) diff --git 
a/RBXLegacyDiscordBot/lib/aiohttp/abc.py b/RBXLegacyDiscordBot/lib/aiohttp/abc.py new file mode 100644 index 0000000..bc20b27 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/abc.py @@ -0,0 +1,88 @@ +import asyncio +import sys +from abc import ABC, abstractmethod +from collections.abc import Iterable, Sized + +PY_35 = sys.version_info >= (3, 5) + + +class AbstractRouter(ABC): + + @asyncio.coroutine # pragma: no branch + @abstractmethod + def resolve(self, request): + """Return MATCH_INFO for given request""" + + +class AbstractMatchInfo(ABC): + + @asyncio.coroutine # pragma: no branch + @abstractmethod + def handler(self, request): + """Execute matched request handler""" + + @asyncio.coroutine # pragma: no branch + @abstractmethod + def expect_handler(self, request): + """Expect handler for 100-continue processing""" + + @property # pragma: no branch + @abstractmethod + def http_exception(self): + """HTTPException instance raised on router's resolving, or None""" + + @abstractmethod # pragma: no branch + def get_info(self): + """Return a dict with additional info useful for introspection""" + + +class AbstractView(ABC): + + def __init__(self, request): + self._request = request + + @property + def request(self): + return self._request + + @asyncio.coroutine # pragma: no branch + @abstractmethod + def __iter__(self): + while False: # pragma: no cover + yield None + + if PY_35: # pragma: no branch + @abstractmethod + def __await__(self): + return # pragma: no cover + + +class AbstractResolver(ABC): + + @asyncio.coroutine # pragma: no branch + @abstractmethod + def resolve(self, hostname): + """Return IP address for given hostname""" + + @asyncio.coroutine # pragma: no branch + @abstractmethod + def close(self): + """Release resolver""" + + +class AbstractCookieJar(Sized, Iterable): + + def __init__(self, *, loop=None): + self._loop = loop or asyncio.get_event_loop() + + @abstractmethod + def clear(self): + """Clear all cookies.""" + + @abstractmethod + def update_cookies(self, cookies, response_url=None): + """Update cookies.""" + + @abstractmethod + def filter_cookies(self, request_url): + """Return the jar's cookies filtered by their attributes.""" diff --git a/RBXLegacyDiscordBot/lib/aiohttp/client.py b/RBXLegacyDiscordBot/lib/aiohttp/client.py new file mode 100644 index 0000000..4c616ef --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/client.py @@ -0,0 +1,786 @@ +"""HTTP Client for asyncio.""" + +import asyncio +import base64 +import hashlib +import os +import sys +import traceback +import urllib.parse +import warnings + +from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr + +import aiohttp + +from . 
import hdrs, helpers +from ._ws_impl import WS_KEY, WebSocketParser, WebSocketWriter +from .client_reqrep import ClientRequest, ClientResponse +from .client_ws import ClientWebSocketResponse +from .cookiejar import CookieJar +from .errors import WSServerHandshakeError +from .helpers import Timeout + +__all__ = ('ClientSession', 'request', 'get', 'options', 'head', + 'delete', 'post', 'put', 'patch', 'ws_connect') + +PY_35 = sys.version_info >= (3, 5) + + +class ClientSession: + """First-class interface for making HTTP requests.""" + + _source_traceback = None + _connector = None + + def __init__(self, *, connector=None, loop=None, cookies=None, + headers=None, skip_auto_headers=None, + auth=None, request_class=ClientRequest, + response_class=ClientResponse, + ws_response_class=ClientWebSocketResponse, + version=aiohttp.HttpVersion11, + cookie_jar=None): + + if connector is None: + connector = aiohttp.TCPConnector(loop=loop) + loop = connector._loop # never None + else: + if loop is None: + loop = connector._loop # never None + elif connector._loop is not loop: + raise ValueError("loop argument must agree with connector") + + self._loop = loop + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies is not None: + self._cookie_jar.update_cookies(cookies) + self._connector = connector + self._default_auth = auth + self._version = version + + # Convert to list of tuples + if headers: + headers = CIMultiDict(headers) + else: + headers = CIMultiDict() + self._default_headers = headers + if skip_auto_headers is not None: + self._skip_auto_headers = frozenset([istr(i) + for i in skip_auto_headers]) + else: + self._skip_auto_headers = frozenset() + + self._request_class = request_class + self._response_class = response_class + self._ws_response_class = ws_response_class + + def __del__(self, _warnings=warnings): + if not self.closed: + self.close() + + _warnings.warn("Unclosed client session {!r}".format(self), + ResourceWarning) + context = {'client_session': self, + 'message': 'Unclosed client session'} + if self._source_traceback is not None: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + + def request(self, method, url, *, + params=None, + data=None, + headers=None, + skip_auto_headers=None, + auth=None, + allow_redirects=True, + max_redirects=10, + encoding='utf-8', + version=None, + compress=None, + chunked=None, + expect100=False, + read_until_eof=True, + proxy=None, + proxy_auth=None, + timeout=5*60): + """Perform HTTP request.""" + + return _RequestContextManager( + self._request( + method, + url, + params=params, + data=data, + headers=headers, + skip_auto_headers=skip_auto_headers, + auth=auth, + allow_redirects=allow_redirects, + max_redirects=max_redirects, + encoding=encoding, + version=version, + compress=compress, + chunked=chunked, + expect100=expect100, + read_until_eof=read_until_eof, + proxy=proxy, + proxy_auth=proxy_auth, + timeout=timeout)) + + @asyncio.coroutine + def _request(self, method, url, *, + params=None, + data=None, + headers=None, + skip_auto_headers=None, + auth=None, + allow_redirects=True, + max_redirects=10, + encoding='utf-8', + version=None, + compress=None, + chunked=None, + expect100=False, + read_until_eof=True, + proxy=None, + proxy_auth=None, + timeout=5*60): + + if version is not None: + warnings.warn("HTTP version should be specified " + "by ClientSession 
constructor", DeprecationWarning) + else: + version = self._version + + if self.closed: + raise RuntimeError('Session is closed') + + redirects = 0 + history = [] + + # Merge with default headers and transform to CIMultiDict + headers = self._prepare_headers(headers) + if auth is None: + auth = self._default_auth + # It would be confusing if we support explicit Authorization header + # with `auth` argument + if (headers is not None and + auth is not None and + hdrs.AUTHORIZATION in headers): + raise ValueError("Can't combine `Authorization` header with " + "`auth` argument") + + skip_headers = set(self._skip_auto_headers) + if skip_auto_headers is not None: + for i in skip_auto_headers: + skip_headers.add(istr(i)) + + while True: + url, _ = urllib.parse.urldefrag(url) + + cookies = self._cookie_jar.filter_cookies(url) + + req = self._request_class( + method, url, params=params, headers=headers, + skip_auto_headers=skip_headers, data=data, + cookies=cookies, encoding=encoding, + auth=auth, version=version, compress=compress, chunked=chunked, + expect100=expect100, + loop=self._loop, response_class=self._response_class, + proxy=proxy, proxy_auth=proxy_auth, timeout=timeout) + + with Timeout(timeout, loop=self._loop): + conn = yield from self._connector.connect(req) + try: + resp = req.send(conn.writer, conn.reader) + try: + yield from resp.start(conn, read_until_eof) + except: + resp.close() + conn.close() + raise + except (aiohttp.HttpProcessingError, + aiohttp.ServerDisconnectedError) as exc: + raise aiohttp.ClientResponseError() from exc + except OSError as exc: + raise aiohttp.ClientOSError(*exc.args) from exc + + self._cookie_jar.update_cookies(resp.cookies, resp.url) + + # redirects + if resp.status in (301, 302, 303, 307) and allow_redirects: + redirects += 1 + history.append(resp) + if max_redirects and redirects >= max_redirects: + resp.close() + break + else: + # TODO: close the connection if BODY is large enough + # Redirect with big BODY is forbidden by HTTP protocol + # but malformed server may send illegal response. + # Small BODIES with text like "Not Found" are still + # perfectly fine and should be accepted. + yield from resp.release() + + # For 301 and 302, mimic IE behaviour, now changed in RFC. 
+ # Details: https://github.com/kennethreitz/requests/pull/269 + if (resp.status == 303 and resp.method != hdrs.METH_HEAD) \ + or (resp.status in (301, 302) and + resp.method == hdrs.METH_POST): + method = hdrs.METH_GET + data = None + if headers.get(hdrs.CONTENT_LENGTH): + headers.pop(hdrs.CONTENT_LENGTH) + + r_url = (resp.headers.get(hdrs.LOCATION) or + resp.headers.get(hdrs.URI)) + + scheme = urllib.parse.urlsplit(r_url)[0] + if scheme not in ('http', 'https', ''): + resp.close() + raise ValueError('Can redirect only to http or https') + elif not scheme: + r_url = urllib.parse.urljoin(url, r_url) + + url = r_url + params = None + yield from resp.release() + continue + + break + + resp._history = tuple(history) + return resp + + def ws_connect(self, url, *, + protocols=(), + timeout=10.0, + autoclose=True, + autoping=True, + auth=None, + origin=None, + headers=None, + proxy=None, + proxy_auth=None): + """Initiate websocket connection.""" + return _WSRequestContextManager( + self._ws_connect(url, + protocols=protocols, + timeout=timeout, + autoclose=autoclose, + autoping=autoping, + auth=auth, + origin=origin, + headers=headers, + proxy=proxy, + proxy_auth=proxy_auth)) + + @asyncio.coroutine + def _ws_connect(self, url, *, + protocols=(), + timeout=10.0, + autoclose=True, + autoping=True, + auth=None, + origin=None, + headers=None, + proxy=None, + proxy_auth=None): + + sec_key = base64.b64encode(os.urandom(16)) + + if headers is None: + headers = CIMultiDict() + + default_headers = { + hdrs.UPGRADE: hdrs.WEBSOCKET, + hdrs.CONNECTION: hdrs.UPGRADE, + hdrs.SEC_WEBSOCKET_VERSION: '13', + hdrs.SEC_WEBSOCKET_KEY: sec_key.decode(), + } + + for key, value in default_headers.items(): + if key not in headers: + headers[key] = value + + if protocols: + headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ','.join(protocols) + if origin is not None: + headers[hdrs.ORIGIN] = origin + + # send request + resp = yield from self.get(url, headers=headers, + read_until_eof=False, + auth=auth, + proxy=proxy, + proxy_auth=proxy_auth) + + try: + # check handshake + if resp.status != 101: + raise WSServerHandshakeError( + message='Invalid response status', + code=resp.status, + headers=resp.headers) + + if resp.headers.get(hdrs.UPGRADE, '').lower() != 'websocket': + raise WSServerHandshakeError( + message='Invalid upgrade header', + code=resp.status, + headers=resp.headers) + + if resp.headers.get(hdrs.CONNECTION, '').lower() != 'upgrade': + raise WSServerHandshakeError( + message='Invalid connection header', + code=resp.status, + headers=resp.headers) + + # key calculation + key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, '') + match = base64.b64encode( + hashlib.sha1(sec_key + WS_KEY).digest()).decode() + if key != match: + raise WSServerHandshakeError( + message='Invalid challenge response', + code=resp.status, + headers=resp.headers) + + # websocket protocol + protocol = None + if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: + resp_protocols = [ + proto.strip() for proto in + resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')] + + for proto in resp_protocols: + if proto in protocols: + protocol = proto + break + + reader = resp.connection.reader.set_parser(WebSocketParser) + resp.connection.writer.set_tcp_nodelay(True) + writer = WebSocketWriter(resp.connection.writer, use_mask=True) + except Exception: + resp.close() + raise + else: + return self._ws_response_class(reader, + writer, + protocol, + resp, + timeout, + autoclose, + autoping, + self._loop) + + def _prepare_headers(self, headers): + """ Add 
default headers and transform it to CIMultiDict + """ + # Convert headers to MultiDict + result = CIMultiDict(self._default_headers) + if headers: + if not isinstance(headers, (MultiDictProxy, MultiDict)): + headers = CIMultiDict(headers) + added_names = set() + for key, value in headers.items(): + if key in added_names: + result.add(key, value) + else: + result[key] = value + added_names.add(key) + return result + + def get(self, url, *, allow_redirects=True, **kwargs): + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request(hdrs.METH_GET, url, + allow_redirects=allow_redirects, + **kwargs)) + + def options(self, url, *, allow_redirects=True, **kwargs): + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request(hdrs.METH_OPTIONS, url, + allow_redirects=allow_redirects, + **kwargs)) + + def head(self, url, *, allow_redirects=False, **kwargs): + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request(hdrs.METH_HEAD, url, + allow_redirects=allow_redirects, + **kwargs)) + + def post(self, url, *, data=None, **kwargs): + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, + data=data, + **kwargs)) + + def put(self, url, *, data=None, **kwargs): + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, + data=data, + **kwargs)) + + def patch(self, url, *, data=None, **kwargs): + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, + data=data, + **kwargs)) + + def delete(self, url, **kwargs): + """Perform HTTP DELETE request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, url, + **kwargs)) + + def close(self): + """Close underlying connector. + + Release all acquired resources. + """ + if not self.closed: + self._connector.close() + self._connector = None + ret = helpers.create_future(self._loop) + ret.set_result(None) + return ret + + @property + def closed(self): + """Is client session closed. + + A readonly property. + """ + return self._connector is None or self._connector.closed + + @property + def connector(self): + """Connector instance used for the session.""" + return self._connector + + @property + def cookie_jar(self): + """The session cookies.""" + return self._cookie_jar + + @property + def version(self): + """The session HTTP protocol version.""" + return self._version + + @property + def loop(self): + """Session's loop.""" + return self._loop + + def detach(self): + """Detach connector from session without closing the former. + + Session is switched to closed state anyway. 
+ """ + self._connector = None + + def __enter__(self): + warnings.warn("Use async with instead", DeprecationWarning) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + if PY_35: + @asyncio.coroutine + def __aenter__(self): + return self + + @asyncio.coroutine + def __aexit__(self, exc_type, exc_val, exc_tb): + yield from self.close() + +if PY_35: + from collections.abc import Coroutine + base = Coroutine +else: + base = object + + +class _BaseRequestContextManager(base): + + __slots__ = ('_coro', '_resp') + + def __init__(self, coro): + self._coro = coro + self._resp = None + + def send(self, value): + return self._coro.send(value) + + def throw(self, typ, val=None, tb=None): + if val is None: + return self._coro.throw(typ) + elif tb is None: + return self._coro.throw(typ, val) + else: + return self._coro.throw(typ, val, tb) + + def close(self): + return self._coro.close() + + @property + def gi_frame(self): + return self._coro.gi_frame + + @property + def gi_running(self): + return self._coro.gi_running + + @property + def gi_code(self): + return self._coro.gi_code + + def __next__(self): + return self.send(None) + + @asyncio.coroutine + def __iter__(self): + resp = yield from self._coro + return resp + + if PY_35: + def __await__(self): + resp = yield from self._coro + return resp + + @asyncio.coroutine + def __aenter__(self): + self._resp = yield from self._coro + return self._resp + + +if not PY_35: + try: + from asyncio import coroutines + coroutines._COROUTINE_TYPES += (_BaseRequestContextManager,) + except: # pragma: no cover + pass # Python 3.4.2 and 3.4.3 has no coroutines._COROUTINE_TYPES + + +class _RequestContextManager(_BaseRequestContextManager): + if PY_35: + @asyncio.coroutine + def __aexit__(self, exc_type, exc, tb): + if exc_type is not None: + self._resp.close() + else: + yield from self._resp.release() + + +class _WSRequestContextManager(_BaseRequestContextManager): + if PY_35: + @asyncio.coroutine + def __aexit__(self, exc_type, exc, tb): + yield from self._resp.close() + + +class _DetachedRequestContextManager(_RequestContextManager): + + __slots__ = _RequestContextManager.__slots__ + ('_session', ) + + def __init__(self, coro, session): + super().__init__(coro) + self._session = session + + @asyncio.coroutine + def __iter__(self): + try: + return (yield from self._coro) + except: + yield from self._session.close() + raise + + if PY_35: + def __await__(self): + try: + return (yield from self._coro) + except: + yield from self._session.close() + raise + + def __del__(self): + self._session.detach() + + +class _DetachedWSRequestContextManager(_WSRequestContextManager): + + __slots__ = _WSRequestContextManager.__slots__ + ('_session', ) + + def __init__(self, coro, session): + super().__init__(coro) + self._session = session + + def __del__(self): + self._session.detach() + + +def request(method, url, *, + params=None, + data=None, + headers=None, + skip_auto_headers=None, + cookies=None, + auth=None, + allow_redirects=True, + max_redirects=10, + encoding='utf-8', + version=None, + compress=None, + chunked=None, + expect100=False, + connector=None, + loop=None, + read_until_eof=True, + request_class=None, + response_class=None, + proxy=None, + proxy_auth=None): + """Constructs and sends a request. Returns response object. 
+ + method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) BasicAuth named tuple represent HTTP Basic Auth + auth - aiohttp.helpers.BasicAuth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + request_class - (optional) Custom Request class implementation. + response_class - (optional) Custom Response class implementation. + loop - Optional event loop. + + Usage:: + + >>> import aiohttp + >>> resp = yield from aiohttp.request('GET', 'http://python.org/') + >>> resp + + >>> data = yield from resp.read() + + """ + warnings.warn("Use ClientSession().request() instead", DeprecationWarning) + if connector is None: + connector = aiohttp.TCPConnector(loop=loop, force_close=True) + + kwargs = {} + + if request_class is not None: + kwargs['request_class'] = request_class + + if response_class is not None: + kwargs['response_class'] = response_class + + session = ClientSession(loop=loop, + cookies=cookies, + connector=connector, + **kwargs) + return _DetachedRequestContextManager( + session._request(method, url, + params=params, + data=data, + headers=headers, + skip_auto_headers=skip_auto_headers, + auth=auth, + allow_redirects=allow_redirects, + max_redirects=max_redirects, + encoding=encoding, + version=version, + compress=compress, + chunked=chunked, + expect100=expect100, + read_until_eof=read_until_eof, + proxy=proxy, + proxy_auth=proxy_auth,), + session=session) + + +def get(url, **kwargs): + warnings.warn("Use ClientSession().get() instead", DeprecationWarning) + return request(hdrs.METH_GET, url, **kwargs) + + +def options(url, **kwargs): + warnings.warn("Use ClientSession().options() instead", DeprecationWarning) + return request(hdrs.METH_OPTIONS, url, **kwargs) + + +def head(url, **kwargs): + warnings.warn("Use ClientSession().head() instead", DeprecationWarning) + return request(hdrs.METH_HEAD, url, **kwargs) + + +def post(url, **kwargs): + warnings.warn("Use ClientSession().post() instead", DeprecationWarning) + return request(hdrs.METH_POST, url, **kwargs) + + +def put(url, **kwargs): + warnings.warn("Use ClientSession().put() instead", DeprecationWarning) + return request(hdrs.METH_PUT, url, **kwargs) + + +def patch(url, **kwargs): + warnings.warn("Use ClientSession().patch() instead", DeprecationWarning) + return request(hdrs.METH_PATCH, url, **kwargs) + + +def delete(url, **kwargs): + warnings.warn("Use ClientSession().delete() instead", DeprecationWarning) + return request(hdrs.METH_DELETE, url, **kwargs) + + +def ws_connect(url, *, protocols=(), timeout=10.0, connector=None, auth=None, + ws_response_class=ClientWebSocketResponse, autoclose=True, + autoping=True, loop=None, origin=None, headers=None): + + warnings.warn("Use ClientSession().ws_connect() instead", + DeprecationWarning) + if loop is None: + loop = 
asyncio.get_event_loop() + + if connector is None: + connector = aiohttp.TCPConnector(loop=loop, force_close=True) + + session = aiohttp.ClientSession(loop=loop, connector=connector, auth=auth, + ws_response_class=ws_response_class, + headers=headers) + + return _DetachedWSRequestContextManager( + session._ws_connect(url, + protocols=protocols, + timeout=timeout, + autoclose=autoclose, + autoping=autoping, + origin=origin), + session=session) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/client_reqrep.py b/RBXLegacyDiscordBot/lib/aiohttp/client_reqrep.py new file mode 100644 index 0000000..8631007 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/client_reqrep.py @@ -0,0 +1,801 @@ +import asyncio +import collections +import http.cookies +import io +import json +import mimetypes +import os +import sys +import traceback +import urllib.parse +import warnings + +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy + +import aiohttp + +from . import hdrs, helpers, streams +from .helpers import Timeout +from .log import client_logger +from .multipart import MultipartWriter +from .protocol import HttpMessage +from .streams import EOF_MARKER, FlowControlStreamReader + +try: + import cchardet as chardet +except ImportError: + import chardet + + +__all__ = ('ClientRequest', 'ClientResponse') + +PY_35 = sys.version_info >= (3, 5) + +HTTP_PORT = 80 +HTTPS_PORT = 443 + + +class ClientRequest: + + GET_METHODS = {hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS} + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} + ALL_METHODS = GET_METHODS.union(POST_METHODS).union( + {hdrs.METH_DELETE, hdrs.METH_TRACE}) + + DEFAULT_HEADERS = { + hdrs.ACCEPT: '*/*', + hdrs.ACCEPT_ENCODING: 'gzip, deflate', + } + + SERVER_SOFTWARE = HttpMessage.SERVER_SOFTWARE + + body = b'' + auth = None + response = None + response_class = None + + _writer = None # async task for streaming data + _continue = None # waiter future for '100 Continue' response + + # N.B. + # Adding __del__ method with self._writer closing doesn't make sense + # because _writer is instance method, thus it keeps a reference to self. + # Until writer has finished finalizer will not be called. 
+ + def __init__(self, method, url, *, + params=None, headers=None, skip_auto_headers=frozenset(), + data=None, cookies=None, + auth=None, encoding='utf-8', + version=aiohttp.HttpVersion11, compress=None, + chunked=None, expect100=False, + loop=None, response_class=None, + proxy=None, proxy_auth=None, + timeout=5*60): + + if loop is None: + loop = asyncio.get_event_loop() + + self.url = url + self.method = method.upper() + self.encoding = encoding + self.chunked = chunked + self.compress = compress + self.loop = loop + self.response_class = response_class or ClientResponse + self._timeout = timeout + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self.update_version(version) + self.update_host(url) + self.update_path(params) + self.update_headers(headers) + self.update_auto_headers(skip_auto_headers) + self.update_cookies(cookies) + self.update_content_encoding(data) + self.update_auth(auth) + self.update_proxy(proxy, proxy_auth) + + self.update_body_from_data(data, skip_auto_headers) + self.update_transfer_encoding() + self.update_expect_continue(expect100) + + def update_host(self, url): + """Update destination host, port and connection type (ssl).""" + url_parsed = urllib.parse.urlsplit(url) + + # check for network location part + netloc = url_parsed.netloc + if not netloc: + raise ValueError('Host could not be detected.') + + # get host/port + host = url_parsed.hostname + if not host: + raise ValueError('Host could not be detected.') + + try: + port = url_parsed.port + except ValueError: + raise ValueError( + 'Port number could not be converted.') from None + + # check domain idna encoding + try: + host = host.encode('idna').decode('utf-8') + netloc = self.make_netloc(host, url_parsed.port) + except UnicodeError: + raise ValueError('URL has an invalid label.') + + # basic auth info + username, password = url_parsed.username, url_parsed.password + if username: + self.auth = helpers.BasicAuth(username, password or '') + + # Record entire netloc for usage in host header + self.netloc = netloc + + scheme = url_parsed.scheme + self.ssl = scheme in ('https', 'wss') + + # set port number if it isn't already set + if not port: + if self.ssl: + port = HTTPS_PORT + else: + port = HTTP_PORT + + self.host, self.port, self.scheme = host, port, scheme + + def make_netloc(self, host, port): + ret = host + if port: + ret = ret + ':' + str(port) + return ret + + def update_version(self, version): + """Convert request version to two elements tuple. 
+ + parser HTTP version '1.1' => (1, 1) + """ + if isinstance(version, str): + v = [l.strip() for l in version.split('.', 1)] + try: + version = int(v[0]), int(v[1]) + except ValueError: + raise ValueError( + 'Can not parse http version number: {}' + .format(version)) from None + self.version = version + + def update_path(self, params): + """Build path.""" + # extract path + scheme, netloc, path, query, fragment = urllib.parse.urlsplit(self.url) + if not path: + path = '/' + + if isinstance(params, collections.Mapping): + params = list(params.items()) + + if params: + if not isinstance(params, str): + params = urllib.parse.urlencode(params) + if query: + query = '%s&%s' % (query, params) + else: + query = params + + self.path = urllib.parse.urlunsplit(('', '', helpers.requote_uri(path), + query, '')) + self.url = urllib.parse.urlunsplit( + (scheme, netloc, self.path, '', fragment)) + + def update_headers(self, headers): + """Update request headers.""" + self.headers = CIMultiDict() + if headers: + if isinstance(headers, dict): + headers = headers.items() + elif isinstance(headers, (MultiDictProxy, MultiDict)): + headers = headers.items() + + for key, value in headers: + self.headers.add(key, value) + + def update_auto_headers(self, skip_auto_headers): + self.skip_auto_headers = skip_auto_headers + used_headers = set(self.headers) | skip_auto_headers + + for hdr, val in self.DEFAULT_HEADERS.items(): + if hdr not in used_headers: + self.headers.add(hdr, val) + + # add host + if hdrs.HOST not in used_headers: + self.headers[hdrs.HOST] = self.netloc + + if hdrs.USER_AGENT not in used_headers: + self.headers[hdrs.USER_AGENT] = self.SERVER_SOFTWARE + + def update_cookies(self, cookies): + """Update request cookies header.""" + if not cookies: + return + + c = http.cookies.SimpleCookie() + if hdrs.COOKIE in self.headers: + c.load(self.headers.get(hdrs.COOKIE, '')) + del self.headers[hdrs.COOKIE] + + if isinstance(cookies, dict): + cookies = cookies.items() + + for name, value in cookies: + if isinstance(value, http.cookies.Morsel): + c[value.key] = value.value + else: + c[name] = value + + self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip() + + def update_content_encoding(self, data): + """Set request content encoding.""" + if not data: + return + + enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower() + if enc: + if self.compress is not False: + self.compress = enc + # enable chunked, no need to deal with length + self.chunked = True + elif self.compress: + if not isinstance(self.compress, str): + self.compress = 'deflate' + self.headers[hdrs.CONTENT_ENCODING] = self.compress + self.chunked = True # enable chunked, no need to deal with length + + def update_auth(self, auth): + """Set basic auth.""" + if auth is None: + auth = self.auth + if auth is None: + return + + if not isinstance(auth, helpers.BasicAuth): + raise TypeError('BasicAuth() tuple is required instead') + + self.headers[hdrs.AUTHORIZATION] = auth.encode() + + def update_body_from_data(self, data, skip_auto_headers): + if not data: + return + + if isinstance(data, str): + data = data.encode(self.encoding) + + if isinstance(data, (bytes, bytearray)): + self.body = data + if (hdrs.CONTENT_TYPE not in self.headers and + hdrs.CONTENT_TYPE not in skip_auto_headers): + self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream' + if hdrs.CONTENT_LENGTH not in self.headers and not self.chunked: + self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + + elif isinstance(data, (asyncio.StreamReader, streams.StreamReader, 
+ streams.DataQueue)): + self.body = data + + elif asyncio.iscoroutine(data): + self.body = data + if (hdrs.CONTENT_LENGTH not in self.headers and + self.chunked is None): + self.chunked = True + + elif isinstance(data, io.IOBase): + assert not isinstance(data, io.StringIO), \ + 'attempt to send text data instead of binary' + self.body = data + if not self.chunked and isinstance(data, io.BytesIO): + # Not chunking if content-length can be determined + size = len(data.getbuffer()) + self.headers[hdrs.CONTENT_LENGTH] = str(size) + self.chunked = False + elif not self.chunked and isinstance(data, io.BufferedReader): + # Not chunking if content-length can be determined + try: + size = os.fstat(data.fileno()).st_size - data.tell() + self.headers[hdrs.CONTENT_LENGTH] = str(size) + self.chunked = False + except OSError: + # data.fileno() is not supported, e.g. + # io.BufferedReader(io.BytesIO(b'data')) + self.chunked = True + else: + self.chunked = True + + if hasattr(data, 'mode'): + if data.mode == 'r': + raise ValueError('file {!r} should be open in binary mode' + ''.format(data)) + if (hdrs.CONTENT_TYPE not in self.headers and + hdrs.CONTENT_TYPE not in skip_auto_headers and + hasattr(data, 'name')): + mime = mimetypes.guess_type(data.name)[0] + mime = 'application/octet-stream' if mime is None else mime + self.headers[hdrs.CONTENT_TYPE] = mime + + elif isinstance(data, MultipartWriter): + self.body = data.serialize() + self.headers.update(data.headers) + self.chunked = self.chunked or 8192 + + else: + if not isinstance(data, helpers.FormData): + data = helpers.FormData(data) + + self.body = data(self.encoding) + + if (hdrs.CONTENT_TYPE not in self.headers and + hdrs.CONTENT_TYPE not in skip_auto_headers): + self.headers[hdrs.CONTENT_TYPE] = data.content_type + + if data.is_multipart: + self.chunked = self.chunked or 8192 + else: + if (hdrs.CONTENT_LENGTH not in self.headers and + not self.chunked): + self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + + def update_transfer_encoding(self): + """Analyze transfer-encoding header.""" + te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower() + + if self.chunked: + if hdrs.CONTENT_LENGTH in self.headers: + del self.headers[hdrs.CONTENT_LENGTH] + if 'chunked' not in te: + self.headers[hdrs.TRANSFER_ENCODING] = 'chunked' + + self.chunked = self.chunked if type(self.chunked) is int else 8192 + else: + if 'chunked' in te: + self.chunked = 8192 + else: + self.chunked = None + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + + def update_expect_continue(self, expect=False): + if expect: + self.headers[hdrs.EXPECT] = '100-continue' + elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue': + expect = True + + if expect: + self._continue = helpers.create_future(self.loop) + + def update_proxy(self, proxy, proxy_auth): + if proxy and not proxy.startswith('http://'): + raise ValueError("Only http proxies are supported") + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): + raise ValueError("proxy_auth must be None or BasicAuth() tuple") + self.proxy = proxy + self.proxy_auth = proxy_auth + + @asyncio.coroutine + def write_bytes(self, request, reader): + """Support coroutines that yields bytes objects.""" + # 100 response + if self._continue is not None: + yield from self._continue + + try: + if asyncio.iscoroutine(self.body): + request.transport.set_tcp_nodelay(True) + exc = None + value = None + stream = self.body + + while True: + try: + if exc is not None: + result = 
stream.throw(exc) + else: + result = stream.send(value) + except StopIteration as exc: + if isinstance(exc.value, bytes): + yield from request.write(exc.value, drain=True) + break + except: + self.response.close() + raise + + if isinstance(result, asyncio.Future): + exc = None + value = None + try: + value = yield result + except Exception as err: + exc = err + elif isinstance(result, (bytes, bytearray)): + yield from request.write(result, drain=True) + value = None + else: + raise ValueError( + 'Bytes object is expected, got: %s.' % + type(result)) + + elif isinstance(self.body, (asyncio.StreamReader, + streams.StreamReader)): + request.transport.set_tcp_nodelay(True) + chunk = yield from self.body.read(streams.DEFAULT_LIMIT) + while chunk: + yield from request.write(chunk, drain=True) + chunk = yield from self.body.read(streams.DEFAULT_LIMIT) + + elif isinstance(self.body, streams.DataQueue): + request.transport.set_tcp_nodelay(True) + while True: + try: + chunk = yield from self.body.read() + if chunk is EOF_MARKER: + break + yield from request.write(chunk, drain=True) + except streams.EofStream: + break + + elif isinstance(self.body, io.IOBase): + chunk = self.body.read(self.chunked) + while chunk: + request.write(chunk) + chunk = self.body.read(self.chunked) + request.transport.set_tcp_nodelay(True) + + else: + if isinstance(self.body, (bytes, bytearray)): + self.body = (self.body,) + + for chunk in self.body: + request.write(chunk) + request.transport.set_tcp_nodelay(True) + + except Exception as exc: + new_exc = aiohttp.ClientRequestError( + 'Can not write request body for %s' % self.url) + new_exc.__context__ = exc + new_exc.__cause__ = exc + reader.set_exception(new_exc) + else: + assert request.transport.tcp_nodelay + try: + ret = request.write_eof() + # NB: in asyncio 3.4.1+ StreamWriter.drain() is coroutine + # see bug #170 + if (asyncio.iscoroutine(ret) or + isinstance(ret, asyncio.Future)): + yield from ret + except Exception as exc: + new_exc = aiohttp.ClientRequestError( + 'Can not write request body for %s' % self.url) + new_exc.__context__ = exc + new_exc.__cause__ = exc + reader.set_exception(new_exc) + + self._writer = None + + def send(self, writer, reader): + writer.set_tcp_cork(True) + request = aiohttp.Request(writer, self.method, self.path, self.version) + + if self.compress: + request.add_compression_filter(self.compress) + + if self.chunked is not None: + request.enable_chunked_encoding() + request.add_chunking_filter(self.chunked) + + # set default content-type + if (self.method in self.POST_METHODS and + hdrs.CONTENT_TYPE not in self.skip_auto_headers and + hdrs.CONTENT_TYPE not in self.headers): + self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream' + + for k, value in self.headers.items(): + request.add_header(k, value) + request.send_headers() + + self._writer = helpers.ensure_future( + self.write_bytes(request, reader), loop=self.loop) + + self.response = self.response_class( + self.method, self.url, self.host, + writer=self._writer, continue100=self._continue, + timeout=self._timeout) + self.response._post_init(self.loop) + return self.response + + @asyncio.coroutine + def close(self): + if self._writer is not None: + try: + yield from self._writer + finally: + self._writer = None + + def terminate(self): + if self._writer is not None: + if not self.loop.is_closed(): + self._writer.cancel() + self._writer = None + + +class ClientResponse: + + # from the Status-Line of the response + version = None # HTTP-Version + status = None # Status-Code + 
reason = None # Reason-Phrase + + cookies = None # Response cookies (Set-Cookie) + content = None # Payload stream + headers = None # Response headers, CIMultiDictProxy + raw_headers = None # Response raw headers, a sequence of pairs + + _connection = None # current connection + flow_control_class = FlowControlStreamReader # reader flow control + _reader = None # input stream + _response_parser = aiohttp.HttpResponseParser() + _source_traceback = None + # setted up by ClientRequest after ClientResponse object creation + # post-init stage allows to not change ctor signature + _loop = None + _closed = True # to allow __del__ for non-initialized properly response + + def __init__(self, method, url, host='', *, writer=None, continue100=None, + timeout=5*60): + super().__init__() + + self.method = method + self.url = url + self.host = host + self._content = None + self._writer = writer + self._continue = continue100 + self._closed = False + self._should_close = True # override by message.should_close later + self._history = () + self._timeout = timeout + + def _post_init(self, loop): + self._loop = loop + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __del__(self, _warnings=warnings): + if self._loop is None: + return # not started + if self._closed: + return + self.close() + + _warnings.warn("Unclosed response {!r}".format(self), + ResourceWarning) + context = {'client_response': self, + 'message': 'Unclosed response'} + if self._source_traceback: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + + def __repr__(self): + out = io.StringIO() + ascii_encodable_url = self.url.encode('ascii', 'backslashreplace') \ + .decode('ascii') + if self.reason: + ascii_encodable_reason = self.reason.encode('ascii', + 'backslashreplace') \ + .decode('ascii') + else: + ascii_encodable_reason = self.reason + print(''.format( + ascii_encodable_url, self.status, ascii_encodable_reason), + file=out) + print(self.headers, file=out) + return out.getvalue() + + @property + def connection(self): + return self._connection + + @property + def history(self): + """A sequence of of responses, if redirects occured.""" + return self._history + + def waiting_for_continue(self): + return self._continue is not None + + def _setup_connection(self, connection): + self._reader = connection.reader + self._connection = connection + self.content = self.flow_control_class( + connection.reader, loop=connection.loop, timeout=self._timeout) + + def _need_parse_response_body(self): + return (self.method.lower() != 'head' and + self.status not in [204, 304]) + + @asyncio.coroutine + def start(self, connection, read_until_eof=False): + """Start response processing.""" + self._setup_connection(connection) + + while True: + httpstream = self._reader.set_parser(self._response_parser) + + # read response + with Timeout(self._timeout, loop=self._loop): + message = yield from httpstream.read() + if message.code != 100: + break + + if self._continue is not None and not self._continue.done(): + self._continue.set_result(True) + self._continue = None + + # response status + self.version = message.version + self.status = message.code + self.reason = message.reason + self._should_close = message.should_close + + # headers + self.headers = CIMultiDictProxy(message.headers) + self.raw_headers = tuple(message.raw_headers) + + # payload + rwb = self._need_parse_response_body() + self._reader.set_parser( + aiohttp.HttpPayloadParser(message, + 
readall=read_until_eof, + response_with_body=rwb), + self.content) + + # cookies + self.cookies = http.cookies.SimpleCookie() + if hdrs.SET_COOKIE in self.headers: + for hdr in self.headers.getall(hdrs.SET_COOKIE): + try: + self.cookies.load(hdr) + except http.cookies.CookieError as exc: + client_logger.warning( + 'Can not load response cookies: %s', exc) + return self + + def close(self): + if self._closed: + return + + self._closed = True + + if self._loop is None or self._loop.is_closed(): + return + + if self._connection is not None: + self._connection.close() + self._connection = None + self._cleanup_writer() + + @asyncio.coroutine + def release(self): + if self._closed: + return + try: + content = self.content + if content is not None and not content.at_eof(): + chunk = yield from content.readany() + while chunk is not EOF_MARKER or chunk: + chunk = yield from content.readany() + except Exception: + self._connection.close() + self._connection = None + raise + finally: + self._closed = True + if self._connection is not None: + self._connection.release() + if self._reader is not None: + self._reader.unset_parser() + self._connection = None + self._cleanup_writer() + + def raise_for_status(self): + if 400 <= self.status: + raise aiohttp.HttpProcessingError( + code=self.status, + message=self.reason) + + def _cleanup_writer(self): + if self._writer is not None and not self._writer.done(): + self._writer.cancel() + self._writer = None + + @asyncio.coroutine + def wait_for_close(self): + if self._writer is not None: + try: + yield from self._writer + finally: + self._writer = None + yield from self.release() + + @asyncio.coroutine + def read(self): + """Read response payload.""" + if self._content is None: + try: + self._content = yield from self.content.read() + except: + self.close() + raise + else: + yield from self.release() + + return self._content + + def _get_encoding(self): + ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() + mtype, stype, _, params = helpers.parse_mimetype(ctype) + + encoding = params.get('charset') + if not encoding: + encoding = chardet.detect(self._content)['encoding'] + if not encoding: + encoding = 'utf-8' + + return encoding + + @asyncio.coroutine + def text(self, encoding=None): + """Read response payload and decode.""" + if self._content is None: + yield from self.read() + + if encoding is None: + encoding = self._get_encoding() + + return self._content.decode(encoding) + + @asyncio.coroutine + def json(self, *, encoding=None, loads=json.loads): + """Read and decodes JSON response.""" + if self._content is None: + yield from self.read() + + ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() + if 'json' not in ctype: + client_logger.warning( + 'Attempt to decode JSON with unexpected mimetype: %s', ctype) + + stripped = self._content.strip() + if not stripped: + return None + + if encoding is None: + encoding = self._get_encoding() + + return loads(stripped.decode(encoding)) + + if PY_35: + @asyncio.coroutine + def __aenter__(self): + return self + + @asyncio.coroutine + def __aexit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + yield from self.release() + else: + self.close() diff --git a/RBXLegacyDiscordBot/lib/aiohttp/client_ws.py b/RBXLegacyDiscordBot/lib/aiohttp/client_ws.py new file mode 100644 index 0000000..984669c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/client_ws.py @@ -0,0 +1,193 @@ +"""WebSocket client for asyncio.""" + +import asyncio +import json +import sys + +from ._ws_impl import CLOSED_MESSAGE, 
WebSocketError, WSMessage, WSMsgType + +PY_35 = sys.version_info >= (3, 5) +PY_352 = sys.version_info >= (3, 5, 2) + + +class ClientWebSocketResponse: + + def __init__(self, reader, writer, protocol, + response, timeout, autoclose, autoping, loop): + self._response = response + self._conn = response.connection + + self._writer = writer + self._reader = reader + self._protocol = protocol + self._closed = False + self._closing = False + self._close_code = None + self._timeout = timeout + self._autoclose = autoclose + self._autoping = autoping + self._loop = loop + self._waiting = False + self._exception = None + + @property + def closed(self): + return self._closed + + @property + def close_code(self): + return self._close_code + + @property + def protocol(self): + return self._protocol + + def exception(self): + return self._exception + + def ping(self, message='b'): + if self._closed: + raise RuntimeError('websocket connection is closed') + self._writer.ping(message) + + def pong(self, message='b'): + if self._closed: + raise RuntimeError('websocket connection is closed') + self._writer.pong(message) + + def send_str(self, data): + if self._closed: + raise RuntimeError('websocket connection is closed') + if not isinstance(data, str): + raise TypeError('data argument must be str (%r)' % type(data)) + self._writer.send(data, binary=False) + + def send_bytes(self, data): + if self._closed: + raise RuntimeError('websocket connection is closed') + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError('data argument must be byte-ish (%r)' % + type(data)) + self._writer.send(data, binary=True) + + def send_json(self, data, *, dumps=json.dumps): + self.send_str(dumps(data)) + + @asyncio.coroutine + def close(self, *, code=1000, message=b''): + if not self._closed: + self._closed = True + try: + self._writer.close(code, message) + except asyncio.CancelledError: + self._close_code = 1006 + self._response.close() + raise + except Exception as exc: + self._close_code = 1006 + self._exception = exc + self._response.close() + return True + + if self._closing: + self._response.close() + return True + + while True: + try: + msg = yield from asyncio.wait_for( + self._reader.read(), self._timeout, loop=self._loop) + except asyncio.CancelledError: + self._close_code = 1006 + self._response.close() + raise + except Exception as exc: + self._close_code = 1006 + self._exception = exc + self._response.close() + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + self._response.close() + return True + else: + return False + + @asyncio.coroutine + def receive(self): + if self._waiting: + raise RuntimeError('Concurrent call to receive() is not allowed') + + self._waiting = True + try: + while True: + if self._closed: + return CLOSED_MESSAGE + + try: + msg = yield from self._reader.read() + except (asyncio.CancelledError, asyncio.TimeoutError): + raise + except WebSocketError as exc: + self._close_code = exc.code + yield from self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = 1006 + yield from self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + yield from self.close() + return msg + if msg.type == WSMsgType.PING and self._autoping: + self.pong(msg.data) + elif msg.type == WSMsgType.PONG and self._autoping: + 
continue + else: + return msg + finally: + self._waiting = False + + @asyncio.coroutine + def receive_str(self): + msg = yield from self.receive() + if msg.type != WSMsgType.TEXT: + raise TypeError( + "Received message {}:{!r} is not str".format(msg.type, + msg.data)) + return msg.data + + @asyncio.coroutine + def receive_bytes(self): + msg = yield from self.receive() + if msg.type != WSMsgType.BINARY: + raise TypeError( + "Received message {}:{!r} is not bytes".format(msg.type, + msg.data)) + return msg.data + + @asyncio.coroutine + def receive_json(self, *, loads=json.loads): + data = yield from self.receive_str() + return loads(data) + + if PY_35: + def __aiter__(self): + return self + + if not PY_352: # pragma: no cover + __aiter__ = asyncio.coroutine(__aiter__) + + @asyncio.coroutine + def __anext__(self): + msg = yield from self.receive() + if msg.type == WSMsgType.CLOSE: + raise StopAsyncIteration # NOQA + return msg diff --git a/RBXLegacyDiscordBot/lib/aiohttp/connector.py b/RBXLegacyDiscordBot/lib/aiohttp/connector.py new file mode 100644 index 0000000..8ada87a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/connector.py @@ -0,0 +1,783 @@ +import asyncio +import functools +import http.cookies +import ssl +import sys +import traceback +import warnings +from collections import defaultdict +from hashlib import md5, sha1, sha256 +from itertools import chain +from math import ceil +from types import MappingProxyType + +import aiohttp + +from . import hdrs, helpers +from .client import ClientRequest +from .errors import (ClientOSError, ClientTimeoutError, FingerprintMismatch, + HttpProxyError, ProxyConnectionError, + ServerDisconnectedError) +from .helpers import is_ip_address, sentinel +from .resolver import DefaultResolver + +__all__ = ('BaseConnector', 'TCPConnector', 'ProxyConnector', 'UnixConnector') + +PY_343 = sys.version_info >= (3, 4, 3) + +HASHFUNC_BY_DIGESTLEN = { + 16: md5, + 20: sha1, + 32: sha256, +} + + +class Connection: + + _source_traceback = None + _transport = None + + def __init__(self, connector, key, request, transport, protocol, loop): + self._key = key + self._connector = connector + self._request = request + self._transport = transport + self._protocol = protocol + self._loop = loop + self.reader = protocol.reader + self.writer = protocol.writer + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __repr__(self): + return 'Connection<{}>'.format(self._key) + + def __del__(self, _warnings=warnings): + if self._transport is not None: + _warnings.warn('Unclosed connection {!r}'.format(self), + ResourceWarning) + if self._loop.is_closed(): + return + + self._connector._release( + self._key, self._request, self._transport, self._protocol, + should_close=True) + + context = {'client_connection': self, + 'message': 'Unclosed connection'} + if self._source_traceback is not None: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + + @property + def loop(self): + return self._loop + + def close(self): + if self._transport is not None: + self._connector._release( + self._key, self._request, self._transport, self._protocol, + should_close=True) + self._transport = None + + def release(self): + if self._transport is not None: + self._connector._release( + self._key, self._request, self._transport, self._protocol, + should_close=False) + self._transport = None + + def detach(self): + self._transport = None + + @property + def closed(self): + return self._transport is None + 
+ +class BaseConnector(object): + """Base connector class. + + conn_timeout - (optional) Connect timeout. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The limit of simultaneous connections to the same endpoint. + loop - Optional event loop. + """ + + _closed = True # prevent AttributeError in __del__ if ctor was failed + _source_traceback = None + + def __init__(self, *, conn_timeout=None, keepalive_timeout=sentinel, + force_close=False, limit=20, + loop=None): + + if force_close: + if keepalive_timeout is not None and \ + keepalive_timeout is not sentinel: + raise ValueError('keepalive_timeout cannot ' + 'be set if force_close is True') + else: + if keepalive_timeout is sentinel: + keepalive_timeout = 30 + + if loop is None: + loop = asyncio.get_event_loop() + + self._closed = False + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self._conns = {} + self._acquired = defaultdict(set) + self._conn_timeout = conn_timeout + self._keepalive_timeout = keepalive_timeout + self._cleanup_handle = None + self._force_close = force_close + self._limit = limit + self._waiters = defaultdict(list) + + self._loop = loop + self._factory = functools.partial( + aiohttp.StreamProtocol, loop=loop, + disconnect_error=ServerDisconnectedError) + + self.cookies = http.cookies.SimpleCookie() + + def __del__(self, _warnings=warnings): + if self._closed: + return + if not self._conns: + return + + conns = [repr(c) for c in self._conns.values()] + + self.close() + + _warnings.warn("Unclosed connector {!r}".format(self), + ResourceWarning) + context = {'connector': self, + 'connections': conns, + 'message': 'Unclosed connector'} + if self._source_traceback is not None: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + + def __enter__(self): + return self + + def __exit__(self, *exc): + self.close() + + @property + def force_close(self): + """Ultimately close connection on releasing if True.""" + return self._force_close + + @property + def limit(self): + """The limit for simultaneous connections to the same endpoint. + + Endpoints are the same if they are have equal + (host, port, is_ssl) triple. + + If limit is None the connector has no limit. + The default limit size is 20. 
+ """ + return self._limit + + def _cleanup(self): + """Cleanup unused transports.""" + if self._cleanup_handle: + self._cleanup_handle.cancel() + self._cleanup_handle = None + + now = self._loop.time() + + connections = {} + timeout = self._keepalive_timeout + + for key, conns in self._conns.items(): + alive = [] + for transport, proto, t0 in conns: + if transport is not None: + if proto and not proto.is_connected(): + transport = None + else: + delta = t0 + self._keepalive_timeout - now + if delta < 0: + transport.close() + transport = None + elif delta < timeout: + timeout = delta + + if transport is not None: + alive.append((transport, proto, t0)) + if alive: + connections[key] = alive + + if connections: + self._cleanup_handle = self._loop.call_at( + ceil(now + timeout), self._cleanup) + + self._conns = connections + + def _start_cleanup_task(self): + if self._cleanup_handle is None: + now = self._loop.time() + self._cleanup_handle = self._loop.call_at( + ceil(now + self._keepalive_timeout), self._cleanup) + + def close(self): + """Close all opened transports.""" + ret = helpers.create_future(self._loop) + ret.set_result(None) + if self._closed: + return ret + self._closed = True + + try: + if self._loop.is_closed(): + return ret + + for key, data in self._conns.items(): + for transport, proto, t0 in data: + transport.close() + + for transport in chain(*self._acquired.values()): + transport.close() + + if self._cleanup_handle: + self._cleanup_handle.cancel() + + finally: + self._conns.clear() + self._acquired.clear() + self._cleanup_handle = None + return ret + + @property + def closed(self): + """Is connector closed. + + A readonly property. + """ + return self._closed + + @asyncio.coroutine + def connect(self, req): + """Get from pool or create new connection.""" + key = (req.host, req.port, req.ssl) + + limit = self._limit + if limit is not None: + fut = helpers.create_future(self._loop) + waiters = self._waiters[key] + + # The limit defines the maximum number of concurrent connections + # for a key. Waiters must be counted against the limit, even before + # the underlying connection is created. + available = limit - len(waiters) - len(self._acquired[key]) + + # Don't wait if there are connections available. + if available > 0: + fut.set_result(None) + + # This connection will now count towards the limit. 
+ waiters.append(fut) + + try: + if limit is not None: + yield from fut + + transport, proto = self._get(key) + if transport is None: + try: + if self._conn_timeout: + transport, proto = yield from asyncio.wait_for( + self._create_connection(req), + self._conn_timeout, loop=self._loop) + else: + transport, proto = \ + yield from self._create_connection(req) + + except asyncio.TimeoutError as exc: + raise ClientTimeoutError( + 'Connection timeout to host {0[0]}:{0[1]} ssl:{0[2]}' + .format(key)) from exc + except OSError as exc: + raise ClientOSError( + exc.errno, + 'Cannot connect to host {0[0]}:{0[1]} ssl:{0[2]} [{1}]' + .format(key, exc.strerror)) from exc + except: + self._release_waiter(key) + raise + + self._acquired[key].add(transport) + conn = Connection(self, key, req, transport, proto, self._loop) + return conn + + def _get(self, key): + try: + conns = self._conns[key] + except KeyError: + return None, None + t1 = self._loop.time() + while conns: + transport, proto, t0 = conns.pop() + if transport is not None and proto.is_connected(): + if t1 - t0 > self._keepalive_timeout: + transport.close() + transport = None + else: + if not conns: + # The very last connection was reclaimed: drop the key + del self._conns[key] + return transport, proto + # No more connections: drop the key + del self._conns[key] + return None, None + + def _release_waiter(self, key): + waiters = self._waiters[key] + while waiters: + waiter = waiters.pop(0) + if not waiter.done(): + waiter.set_result(None) + break + + def _release(self, key, req, transport, protocol, *, should_close=False): + if self._closed: + # acquired connection is already released on connector closing + return + + acquired = self._acquired[key] + try: + acquired.remove(transport) + except KeyError: # pragma: no cover + # this may be result of undetermenistic order of objects + # finalization due garbage collection. + pass + else: + if self._limit is not None and len(acquired) < self._limit: + self._release_waiter(key) + + resp = req.response + + if not should_close: + if self._force_close: + should_close = True + elif resp is not None: + should_close = resp._should_close + + reader = protocol.reader + if should_close or (reader.output and not reader.output.at_eof()): + transport.close() + else: + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((transport, protocol, self._loop.time())) + reader.unset_parser() + + self._start_cleanup_task() + + @asyncio.coroutine + def _create_connection(self, req): + raise NotImplementedError() + + +_SSL_OP_NO_COMPRESSION = getattr(ssl, "OP_NO_COMPRESSION", 0) + + +class TCPConnector(BaseConnector): + """TCP connector. + + verify_ssl - Set to True to check ssl certifications. + fingerprint - Pass the binary md5, sha1, or sha256 + digest of the expected certificate in DER format to verify + that the certificate the server presents matches. See also + https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning + resolve - (Deprecated) Set to True to do DNS lookup for + host name. + resolver - Enable DNS lookups and use this + resolver + use_dns_cache - Use memory cache for DNS lookups. + family - socket address family + local_addr - local tuple of (host, port) to bind socket to + + conn_timeout - (optional) Connect timeout. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). 
+ limit - The limit of simultaneous connections to the same endpoint. + loop - Optional event loop. + """ + + def __init__(self, *, verify_ssl=True, fingerprint=None, + resolve=sentinel, use_dns_cache=sentinel, + family=0, ssl_context=None, local_addr=None, resolver=None, + conn_timeout=None, keepalive_timeout=sentinel, + force_close=False, limit=20, + loop=None): + super().__init__(conn_timeout=conn_timeout, + keepalive_timeout=keepalive_timeout, + force_close=force_close, limit=limit, loop=loop) + + if not verify_ssl and ssl_context is not None: + raise ValueError( + "Either disable ssl certificate validation by " + "verify_ssl=False or specify ssl_context, not both.") + + self._verify_ssl = verify_ssl + + if fingerprint: + digestlen = len(fingerprint) + hashfunc = HASHFUNC_BY_DIGESTLEN.get(digestlen) + if not hashfunc: + raise ValueError('fingerprint has invalid length') + self._hashfunc = hashfunc + self._fingerprint = fingerprint + + if resolve is not sentinel: + warnings.warn(("resolve parameter is deprecated, " + "use use_dns_cache instead"), + DeprecationWarning, stacklevel=2) + + if use_dns_cache is not sentinel and resolve is not sentinel: + if use_dns_cache != resolve: + raise ValueError("use_dns_cache must agree with resolve") + _use_dns_cache = use_dns_cache + elif use_dns_cache is not sentinel: + _use_dns_cache = use_dns_cache + elif resolve is not sentinel: + _use_dns_cache = resolve + else: + _use_dns_cache = True + + if resolver is None: + resolver = DefaultResolver(loop=self._loop) + self._resolver = resolver + + self._use_dns_cache = _use_dns_cache + self._cached_hosts = {} + self._ssl_context = ssl_context + self._family = family + self._local_addr = local_addr + + @property + def verify_ssl(self): + """Do check for ssl certifications?""" + return self._verify_ssl + + @property + def fingerprint(self): + """Expected ssl certificate fingerprint.""" + return self._fingerprint + + @property + def ssl_context(self): + """SSLContext instance for https requests. + + Lazy property, creates context on demand. 
+ """ + if self._ssl_context is None: + if not self._verify_ssl: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.options |= _SSL_OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + else: + sslcontext = ssl.create_default_context() + self._ssl_context = sslcontext + return self._ssl_context + + @property + def family(self): + """Socket family like AF_INET.""" + return self._family + + @property + def use_dns_cache(self): + """True if local DNS caching is enabled.""" + return self._use_dns_cache + + @property + def cached_hosts(self): + """Read-only dict of cached DNS record.""" + return MappingProxyType(self._cached_hosts) + + def clear_dns_cache(self, host=None, port=None): + """Remove specified host/port or clear all dns local cache.""" + if host is not None and port is not None: + self._cached_hosts.pop((host, port), None) + elif host is not None or port is not None: + raise ValueError("either both host and port " + "or none of them are allowed") + else: + self._cached_hosts.clear() + + @property + def resolve(self): + """Do DNS lookup for host name?""" + warnings.warn((".resolve property is deprecated, " + "use .dns_cache instead"), + DeprecationWarning, stacklevel=2) + return self.use_dns_cache + + @property + def resolved_hosts(self): + """The dict of (host, port) -> (ipaddr, port) pairs.""" + warnings.warn((".resolved_hosts property is deprecated, " + "use .cached_hosts instead"), + DeprecationWarning, stacklevel=2) + return self.cached_hosts + + def clear_resolved_hosts(self, host=None, port=None): + """Remove specified host/port or clear all resolve cache.""" + warnings.warn((".clear_resolved_hosts() is deprecated, " + "use .clear_dns_cache() instead"), + DeprecationWarning, stacklevel=2) + if host is not None and port is not None: + self.clear_dns_cache(host, port) + else: + self.clear_dns_cache() + + @asyncio.coroutine + def _resolve_host(self, host, port): + if is_ip_address(host): + return [{'hostname': host, 'host': host, 'port': port, + 'family': self._family, 'proto': 0, 'flags': 0}] + + if self._use_dns_cache: + key = (host, port) + + if key not in self._cached_hosts: + self._cached_hosts[key] = yield from \ + self._resolver.resolve(host, port, family=self._family) + + return self._cached_hosts[key] + else: + res = yield from self._resolver.resolve( + host, port, family=self._family) + return res + + @asyncio.coroutine + def _create_connection(self, req): + """Create connection. + + Has same keyword arguments as BaseEventLoop.create_connection. 
+ """ + if req.proxy: + transport, proto = yield from self._create_proxy_connection(req) + else: + transport, proto = yield from self._create_direct_connection(req) + + return transport, proto + + @asyncio.coroutine + def _create_direct_connection(self, req): + if req.ssl: + sslcontext = self.ssl_context + else: + sslcontext = None + + hosts = yield from self._resolve_host(req.host, req.port) + exc = None + + for hinfo in hosts: + try: + host = hinfo['host'] + port = hinfo['port'] + transp, proto = yield from self._loop.create_connection( + self._factory, host, port, + ssl=sslcontext, family=hinfo['family'], + proto=hinfo['proto'], flags=hinfo['flags'], + server_hostname=hinfo['hostname'] if sslcontext else None, + local_addr=self._local_addr) + has_cert = transp.get_extra_info('sslcontext') + if has_cert and self._fingerprint: + sock = transp.get_extra_info('socket') + if not hasattr(sock, 'getpeercert'): + # Workaround for asyncio 3.5.0 + # Starting from 3.5.1 version + # there is 'ssl_object' extra info in transport + sock = transp._ssl_protocol._sslpipe.ssl_object + # gives DER-encoded cert as a sequence of bytes (or None) + cert = sock.getpeercert(binary_form=True) + assert cert + got = self._hashfunc(cert).digest() + expected = self._fingerprint + if got != expected: + transp.close() + raise FingerprintMismatch(expected, got, host, port) + return transp, proto + except OSError as e: + exc = e + else: + raise ClientOSError(exc.errno, + 'Can not connect to %s:%s [%s]' % + (req.host, req.port, exc.strerror)) from exc + + @asyncio.coroutine + def _create_proxy_connection(self, req): + proxy_req = ClientRequest( + hdrs.METH_GET, req.proxy, + headers={hdrs.HOST: req.host}, + auth=req.proxy_auth, + loop=self._loop) + try: + # create connection to proxy server + transport, proto = yield from self._create_direct_connection( + proxy_req) + except OSError as exc: + raise ProxyConnectionError(*exc.args) from exc + + if not req.ssl: + req.path = '{scheme}://{host}{path}'.format(scheme=req.scheme, + host=req.netloc, + path=req.path) + if hdrs.AUTHORIZATION in proxy_req.headers: + auth = proxy_req.headers[hdrs.AUTHORIZATION] + del proxy_req.headers[hdrs.AUTHORIZATION] + if not req.ssl: + req.headers[hdrs.PROXY_AUTHORIZATION] = auth + else: + proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth + + if req.ssl: + # For HTTPS requests over HTTP proxy + # we must notify proxy to tunnel connection + # so we send CONNECT command: + # CONNECT www.python.org:443 HTTP/1.1 + # Host: www.python.org + # + # next we must do TLS handshake and so on + # to do this we must wrap raw socket into secure one + # asyncio handles this perfectly + proxy_req.method = hdrs.METH_CONNECT + proxy_req.path = '{}:{}'.format(req.host, req.port) + key = (req.host, req.port, req.ssl) + conn = Connection(self, key, proxy_req, + transport, proto, self._loop) + self._acquired[key].add(conn._transport) + proxy_resp = proxy_req.send(conn.writer, conn.reader) + try: + resp = yield from proxy_resp.start(conn, True) + except: + proxy_resp.close() + conn.close() + raise + else: + conn.detach() + if resp.status != 200: + raise HttpProxyError(code=resp.status, message=resp.reason) + rawsock = transport.get_extra_info('socket', default=None) + if rawsock is None: + raise RuntimeError( + "Transport does not expose socket instance") + transport.pause_reading() + transport, proto = yield from self._loop.create_connection( + self._factory, ssl=self.ssl_context, sock=rawsock, + server_hostname=req.host) + finally: + proxy_resp.close() + + return 
transport, proto + + +class ProxyConnector(TCPConnector): + """Http Proxy connector. + Deprecated, use ClientSession.request with proxy parameters. + Is still here for backward compatibility. + + proxy - Proxy URL address. Only HTTP proxy supported. + proxy_auth - (optional) Proxy HTTP Basic Auth + proxy_auth - aiohttp.helpers.BasicAuth + conn_timeout - (optional) Connect timeout. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The limit of simultaneous connections to the same endpoint. + loop - Optional event loop. + + Usage: + + >>> conn = ProxyConnector(proxy="http://some.proxy.com") + >>> session = ClientSession(connector=conn) + >>> resp = yield from session.get('http://python.org') + + """ + + def __init__(self, proxy, *, proxy_auth=None, force_close=True, + conn_timeout=None, keepalive_timeout=sentinel, + limit=20, loop=None): + warnings.warn("ProxyConnector is deprecated, use " + "client.get(url, proxy=proxy_url) instead", + DeprecationWarning) + super().__init__(force_close=force_close, + conn_timeout=conn_timeout, + keepalive_timeout=keepalive_timeout, + limit=limit, loop=loop) + self._proxy = proxy + self._proxy_auth = proxy_auth + + @property + def proxy(self): + return self._proxy + + @property + def proxy_auth(self): + return self._proxy_auth + + @asyncio.coroutine + def _create_connection(self, req): + """ + Use TCPConnector _create_connection, to emulate old ProxyConnector. + """ + req.update_proxy(self._proxy, self._proxy_auth) + transport, proto = yield from super()._create_connection(req) + + return transport, proto + + +class UnixConnector(BaseConnector): + """Unix socket connector. + + path - Unix socket path. + conn_timeout - (optional) Connect timeout. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The limit of simultaneous connections to the same endpoint. + loop - Optional event loop. 
+ + Usage: + + >>> conn = UnixConnector(path='/path/to/socket') + >>> session = ClientSession(connector=conn) + >>> resp = yield from session.get('http://python.org') + + """ + + def __init__(self, path, force_close=False, conn_timeout=None, + keepalive_timeout=sentinel, limit=20, loop=None): + super().__init__(force_close=force_close, + conn_timeout=conn_timeout, + keepalive_timeout=keepalive_timeout, + limit=limit, loop=loop) + self._path = path + + @property + def path(self): + """Path to unix socket.""" + return self._path + + @asyncio.coroutine + def _create_connection(self, req): + return (yield from self._loop.create_unix_connection( + self._factory, self._path)) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/cookiejar.py b/RBXLegacyDiscordBot/lib/aiohttp/cookiejar.py new file mode 100644 index 0000000..907b0ca --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/cookiejar.py @@ -0,0 +1,290 @@ +import datetime +import re +from collections import defaultdict +from collections.abc import Mapping +from http.cookies import Morsel, SimpleCookie +from math import ceil +from urllib.parse import urlsplit + +from .abc import AbstractCookieJar +from .helpers import is_ip_address + + +class CookieJar(AbstractCookieJar): + """Implements cookie storage adhering to RFC 6265.""" + + DATE_TOKENS_RE = re.compile( + "[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" + "(?P[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)") + + DATE_HMS_TIME_RE = re.compile("(\d{1,2}):(\d{1,2}):(\d{1,2})") + + DATE_DAY_OF_MONTH_RE = re.compile("(\d{1,2})") + + DATE_MONTH_RE = re.compile("(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" + "(aug)|(sep)|(oct)|(nov)|(dec)", re.I) + + DATE_YEAR_RE = re.compile("(\d{2,4})") + + MAX_TIME = 2051215261.0 # so far in future (2035-01-01) + + def __init__(self, *, unsafe=False, loop=None): + super().__init__(loop=loop) + self._cookies = defaultdict(SimpleCookie) + self._host_only_cookies = set() + self._unsafe = unsafe + self._next_expiration = ceil(self._loop.time()) + self._expirations = {} + + def clear(self): + self._cookies.clear() + self._host_only_cookies.clear() + self._next_expiration = ceil(self._loop.time()) + self._expirations.clear() + + def __iter__(self): + self._do_expiration() + for val in self._cookies.values(): + yield from val.values() + + def __len__(self): + return sum(1 for i in self) + + def _do_expiration(self): + now = self._loop.time() + if self._next_expiration > now: + return + if not self._expirations: + return + next_expiration = self.MAX_TIME + to_del = [] + cookies = self._cookies + expirations = self._expirations + for (domain, name), when in expirations.items(): + if when < now: + cookies[domain].pop(name, None) + to_del.append((domain, name)) + self._host_only_cookies.discard((domain, name)) + else: + next_expiration = min(next_expiration, when) + for key in to_del: + del expirations[key] + + self._next_expiration = ceil(next_expiration) + + def _expire_cookie(self, when, domain, name): + self._next_expiration = min(self._next_expiration, when) + self._expirations[(domain, name)] = when + + def update_cookies(self, cookies, response_url=None): + """Update cookies.""" + url_parsed = urlsplit(response_url or "") + hostname = url_parsed.hostname + + if not self._unsafe and is_ip_address(hostname): + # Don't accept cookies from IPs + return + + if isinstance(cookies, Mapping): + cookies = cookies.items() + + for name, cookie in cookies: + if not isinstance(cookie, Morsel): + tmp = SimpleCookie() + tmp[name] = cookie + cookie = tmp[name] + + domain = cookie["domain"] + + 
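+            # The checks below follow the RFC 6265 storage rules: a domain
+            # with a trailing dot is ignored, an empty domain makes the
+            # cookie host-only and binds it to the response hostname, a
+            # leading dot is stripped, and a domain that does not match the
+            # response hostname causes the cookie to be rejected.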
# ignore domains with trailing dots + if domain.endswith('.'): + domain = "" + del cookie["domain"] + + if not domain and hostname is not None: + # Set the cookie's domain to the response hostname + # and set its host-only-flag + self._host_only_cookies.add((hostname, name)) + domain = cookie["domain"] = hostname + + if domain.startswith("."): + # Remove leading dot + domain = domain[1:] + cookie["domain"] = domain + + if hostname and not self._is_domain_match(domain, hostname): + # Setting cookies for different domains is not allowed + continue + + path = cookie["path"] + if not path or not path.startswith("/"): + # Set the cookie's path to the response path + path = url_parsed.path + if not path.startswith("/"): + path = "/" + else: + # Cut everything from the last slash to the end + path = "/" + path[1:path.rfind("/")] + cookie["path"] = path + + max_age = cookie["max-age"] + if max_age: + try: + delta_seconds = int(max_age) + self._expire_cookie(self._loop.time() + delta_seconds, + domain, name) + except ValueError: + cookie["max-age"] = "" + + else: + expires = cookie["expires"] + if expires: + expire_time = self._parse_date(expires) + if expire_time: + self._expire_cookie(expire_time.timestamp(), + domain, name) + else: + cookie["expires"] = "" + + # use dict method because SimpleCookie class modifies value + # before Python 3.4.3 + dict.__setitem__(self._cookies[domain], name, cookie) + + self._do_expiration() + + def filter_cookies(self, request_url): + """Returns this jar's cookies filtered by their attributes.""" + self._do_expiration() + url_parsed = urlsplit(request_url) + filtered = SimpleCookie() + hostname = url_parsed.hostname or "" + is_not_secure = url_parsed.scheme not in ("https", "wss") + + for cookie in self: + name = cookie.key + domain = cookie["domain"] + + # Send shared cookies + if not domain: + filtered[name] = cookie.value + continue + + if not self._unsafe and is_ip_address(hostname): + continue + + if (domain, name) in self._host_only_cookies: + if domain != hostname: + continue + elif not self._is_domain_match(domain, hostname): + continue + + if not self._is_path_match(url_parsed.path, cookie["path"]): + continue + + if is_not_secure and cookie["secure"]: + continue + + filtered[name] = cookie.value + + return filtered + + @staticmethod + def _is_domain_match(domain, hostname): + """Implements domain matching adhering to RFC 6265.""" + if hostname == domain: + return True + + if not hostname.endswith(domain): + return False + + non_matching = hostname[:-len(domain)] + + if not non_matching.endswith("."): + return False + + return not is_ip_address(hostname) + + @staticmethod + def _is_path_match(req_path, cookie_path): + """Implements path matching adhering to RFC 6265.""" + if not req_path.startswith("/"): + req_path = "/" + + if req_path == cookie_path: + return True + + if not req_path.startswith(cookie_path): + return False + + if cookie_path.endswith("/"): + return True + + non_matching = req_path[len(cookie_path):] + + return non_matching.startswith("/") + + @classmethod + def _parse_date(cls, date_str): + """Implements date string parsing adhering to RFC 6265.""" + if not date_str: + return + + found_time = False + found_day = False + found_month = False + found_year = False + + hour = minute = second = 0 + day = 0 + month = 0 + year = 0 + + for token_match in cls.DATE_TOKENS_RE.finditer(date_str): + + token = token_match.group("token") + + if not found_time: + time_match = cls.DATE_HMS_TIME_RE.match(token) + if time_match: + found_time = True + 
hour, minute, second = [ + int(s) for s in time_match.groups()] + continue + + if not found_day: + day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) + if day_match: + found_day = True + day = int(day_match.group()) + continue + + if not found_month: + month_match = cls.DATE_MONTH_RE.match(token) + if month_match: + found_month = True + month = month_match.lastindex + continue + + if not found_year: + year_match = cls.DATE_YEAR_RE.match(token) + if year_match: + found_year = True + year = int(year_match.group()) + + if 70 <= year <= 99: + year += 1900 + elif 0 <= year <= 69: + year += 2000 + + if False in (found_day, found_month, found_year, found_time): + return + + if not 1 <= day <= 31: + return + + if year < 1601 or hour > 23 or minute > 59 or second > 59: + return + + return datetime.datetime(year, month, day, + hour, minute, second, + tzinfo=datetime.timezone.utc) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/errors.py b/RBXLegacyDiscordBot/lib/aiohttp/errors.py new file mode 100644 index 0000000..35f848e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/errors.py @@ -0,0 +1,186 @@ +"""HTTP related errors.""" + +from asyncio import TimeoutError + +__all__ = ( + 'DisconnectedError', 'ClientDisconnectedError', 'ServerDisconnectedError', + + 'HttpProcessingError', 'BadHttpMessage', + 'HttpMethodNotAllowed', 'HttpBadRequest', 'HttpProxyError', + 'BadStatusLine', 'LineTooLong', 'InvalidHeader', + + 'ClientError', 'ClientHttpProcessingError', 'ClientConnectionError', + 'ClientOSError', 'ClientTimeoutError', 'ProxyConnectionError', + 'ClientRequestError', 'ClientResponseError', + 'FingerprintMismatch', + + 'WSServerHandshakeError', 'WSClientDisconnectedError') + + +class DisconnectedError(Exception): + """Disconnected.""" + + +class ClientDisconnectedError(DisconnectedError): + """Client disconnected.""" + + +class ServerDisconnectedError(DisconnectedError): + """Server disconnected.""" + + +class WSClientDisconnectedError(ClientDisconnectedError): + """Deprecated.""" + + +class ClientError(Exception): + """Base class for client connection errors.""" + + +class ClientHttpProcessingError(ClientError): + """Base class for client HTTP processing errors.""" + + +class ClientRequestError(ClientHttpProcessingError): + """Connection error during sending request.""" + + +class ClientResponseError(ClientHttpProcessingError): + """Connection error during reading response.""" + + +class ClientConnectionError(ClientError): + """Base class for client socket errors.""" + + +class ClientOSError(ClientConnectionError, OSError): + """OSError error.""" + + +class ClientTimeoutError(ClientConnectionError, TimeoutError): + """Client connection timeout error.""" + + +class ProxyConnectionError(ClientConnectionError): + """Proxy connection error. + + Raised in :class:`aiohttp.connector.ProxyConnector` if + connection to proxy can not be established. + """ + + +class HttpProcessingError(Exception): + """HTTP error. + + Shortcut for raising HTTP errors with custom code, message and headers. + + :param int code: HTTP Error code. + :param str message: (optional) Error message. + :param list of [tuple] headers: (optional) Headers to be sent in response. 
+ """ + + code = 0 + message = '' + headers = None + + def __init__(self, *, code=None, message='', headers=None): + if code is not None: + self.code = code + self.headers = headers + self.message = message + + super().__init__("%s, message='%s'" % (self.code, message)) + + +class WSServerHandshakeError(HttpProcessingError): + """websocket server handshake error.""" + + +class HttpProxyError(HttpProcessingError): + """HTTP proxy error. + + Raised in :class:`aiohttp.connector.ProxyConnector` if + proxy responds with status other than ``200 OK`` + on ``CONNECT`` request. + """ + + +class BadHttpMessage(HttpProcessingError): + + code = 400 + message = 'Bad Request' + + def __init__(self, message, *, headers=None): + super().__init__(message=message, headers=headers) + + +class HttpMethodNotAllowed(HttpProcessingError): + + code = 405 + message = 'Method Not Allowed' + + +class HttpBadRequest(BadHttpMessage): + + code = 400 + message = 'Bad Request' + + +class ContentEncodingError(BadHttpMessage): + """Content encoding error.""" + + +class TransferEncodingError(BadHttpMessage): + """transfer encoding error.""" + + +class LineTooLong(BadHttpMessage): + + def __init__(self, line, limit='Unknown'): + super().__init__( + "got more than %s bytes when reading %s" % (limit, line)) + + +class InvalidHeader(BadHttpMessage): + + def __init__(self, hdr): + if isinstance(hdr, bytes): + hdr = hdr.decode('utf-8', 'surrogateescape') + super().__init__('Invalid HTTP Header: {}'.format(hdr)) + self.hdr = hdr + + +class BadStatusLine(BadHttpMessage): + + def __init__(self, line=''): + if not line: + line = repr(line) + self.args = line, + self.line = line + + +class LineLimitExceededParserError(HttpBadRequest): + """Line is too long.""" + + def __init__(self, msg, limit): + super().__init__(msg) + self.limit = limit + + +class FingerprintMismatch(ClientConnectionError): + """SSL certificate does not match expected fingerprint.""" + + def __init__(self, expected, got, host, port): + self.expected = expected + self.got = got + self.host = host + self.port = port + + def __repr__(self): + return '<{} expected={} got={} host={} port={}>'.format( + self.__class__.__name__, self.expected, self.got, + self.host, self.port) + + +class InvalidURL(Exception): + """Invalid URL.""" diff --git a/RBXLegacyDiscordBot/lib/aiohttp/file_sender.py b/RBXLegacyDiscordBot/lib/aiohttp/file_sender.py new file mode 100644 index 0000000..c2768de --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/file_sender.py @@ -0,0 +1,168 @@ +import asyncio +import mimetypes +import os + +from . import hdrs +from .helpers import create_future +from .web_reqrep import StreamResponse + + +class FileSender: + """"A helper that can be used to send files. 
+ """ + + def __init__(self, *, resp_factory=StreamResponse, chunk_size=256*1024): + self._response_factory = resp_factory + self._chunk_size = chunk_size + if bool(os.environ.get("AIOHTTP_NOSENDFILE")): + self._sendfile = self._sendfile_fallback + + def _sendfile_cb(self, fut, out_fd, in_fd, offset, + count, loop, registered): + if registered: + loop.remove_writer(out_fd) + if fut.cancelled(): + return + try: + n = os.sendfile(out_fd, in_fd, offset, count) + if n == 0: # EOF reached + n = count + except (BlockingIOError, InterruptedError): + n = 0 + except Exception as exc: + fut.set_exception(exc) + return + + if n < count: + loop.add_writer(out_fd, self._sendfile_cb, fut, out_fd, in_fd, + offset + n, count - n, loop, True) + else: + fut.set_result(None) + + @asyncio.coroutine + def _sendfile_system(self, request, resp, fobj, count): + # Write count bytes of fobj to resp using + # the os.sendfile system call. + # + # request should be a aiohttp.web.Request instance. + # + # resp should be a aiohttp.web.StreamResponse instance. + # + # fobj should be an open file object. + # + # count should be an integer > 0. + + transport = request.transport + + if transport.get_extra_info("sslcontext"): + yield from self._sendfile_fallback(request, resp, fobj, count) + return + + def _send_headers(resp_impl): + # Durty hack required for + # https://github.com/KeepSafe/aiohttp/issues/1093 + # don't send headers in sendfile mode + pass + + resp._send_headers = _send_headers + + @asyncio.coroutine + def write_eof(): + # Durty hack required for + # https://github.com/KeepSafe/aiohttp/issues/1177 + # do nothing in write_eof + pass + + resp.write_eof = write_eof + + resp_impl = yield from resp.prepare(request) + + loop = request.app.loop + # See https://github.com/KeepSafe/aiohttp/issues/958 for details + + # send headers + headers = ['HTTP/{0.major}.{0.minor} 200 OK\r\n'.format( + request.version)] + for hdr, val in resp.headers.items(): + headers.append('{}: {}\r\n'.format(hdr, val)) + headers.append('\r\n') + + out_socket = transport.get_extra_info("socket").dup() + out_socket.setblocking(False) + out_fd = out_socket.fileno() + in_fd = fobj.fileno() + + bheaders = ''.join(headers).encode('utf-8') + headers_length = len(bheaders) + resp_impl.headers_length = headers_length + resp_impl.output_length = headers_length + count + + try: + yield from loop.sock_sendall(out_socket, bheaders) + fut = create_future(loop) + self._sendfile_cb(fut, out_fd, in_fd, 0, count, loop, False) + + yield from fut + finally: + out_socket.close() + + @asyncio.coroutine + def _sendfile_fallback(self, request, resp, fobj, count): + # Mimic the _sendfile_system() method, but without using the + # os.sendfile() system call. This should be used on systems + # that don't support the os.sendfile(). + + # To avoid blocking the event loop & to keep memory usage low, + # fobj is transferred in chunks controlled by the + # constructor's chunk_size argument. 
+ + yield from resp.prepare(request) + + chunk_size = self._chunk_size + + chunk = fobj.read(chunk_size) + while True: + resp.write(chunk) + yield from resp.drain() + count = count - chunk_size + if count <= 0: + break + chunk = fobj.read(count) + + if hasattr(os, "sendfile"): # pragma: no cover + _sendfile = _sendfile_system + else: # pragma: no cover + _sendfile = _sendfile_fallback + + @asyncio.coroutine + def send(self, request, filepath): + """Send filepath to client using request.""" + st = filepath.stat() + + modsince = request.if_modified_since + if modsince is not None and st.st_mtime <= modsince.timestamp(): + from .web_exceptions import HTTPNotModified + raise HTTPNotModified() + + ct, encoding = mimetypes.guess_type(str(filepath)) + if not ct: + ct = 'application/octet-stream' + + resp = self._response_factory() + resp.content_type = ct + if encoding: + resp.headers[hdrs.CONTENT_ENCODING] = encoding + resp.last_modified = st.st_mtime + + file_size = st.st_size + + resp.content_length = file_size + resp.set_tcp_cork(True) + try: + with filepath.open('rb') as f: + yield from self._sendfile(request, resp, f, file_size) + + finally: + resp.set_tcp_nodelay(True) + + return resp diff --git a/RBXLegacyDiscordBot/lib/aiohttp/hdrs.py b/RBXLegacyDiscordBot/lib/aiohttp/hdrs.py new file mode 100644 index 0000000..f994319 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/hdrs.py @@ -0,0 +1,91 @@ +"""HTTP Headers constants.""" +from multidict import istr + +METH_ANY = '*' +METH_CONNECT = 'CONNECT' +METH_HEAD = 'HEAD' +METH_GET = 'GET' +METH_DELETE = 'DELETE' +METH_OPTIONS = 'OPTIONS' +METH_PATCH = 'PATCH' +METH_POST = 'POST' +METH_PUT = 'PUT' +METH_TRACE = 'TRACE' + +METH_ALL = {METH_CONNECT, METH_HEAD, METH_GET, METH_DELETE, + METH_OPTIONS, METH_PATCH, METH_POST, METH_PUT, METH_TRACE} + + +ACCEPT = istr('ACCEPT') +ACCEPT_CHARSET = istr('ACCEPT-CHARSET') +ACCEPT_ENCODING = istr('ACCEPT-ENCODING') +ACCEPT_LANGUAGE = istr('ACCEPT-LANGUAGE') +ACCEPT_RANGES = istr('ACCEPT-RANGES') +ACCESS_CONTROL_MAX_AGE = istr('ACCESS-CONTROL-MAX-AGE') +ACCESS_CONTROL_ALLOW_CREDENTIALS = istr('ACCESS-CONTROL-ALLOW-CREDENTIALS') +ACCESS_CONTROL_ALLOW_HEADERS = istr('ACCESS-CONTROL-ALLOW-HEADERS') +ACCESS_CONTROL_ALLOW_METHODS = istr('ACCESS-CONTROL-ALLOW-METHODS') +ACCESS_CONTROL_ALLOW_ORIGIN = istr('ACCESS-CONTROL-ALLOW-ORIGIN') +ACCESS_CONTROL_EXPOSE_HEADERS = istr('ACCESS-CONTROL-EXPOSE-HEADERS') +ACCESS_CONTROL_REQUEST_HEADERS = istr('ACCESS-CONTROL-REQUEST-HEADERS') +ACCESS_CONTROL_REQUEST_METHOD = istr('ACCESS-CONTROL-REQUEST-METHOD') +AGE = istr('AGE') +ALLOW = istr('ALLOW') +AUTHORIZATION = istr('AUTHORIZATION') +CACHE_CONTROL = istr('CACHE-CONTROL') +CONNECTION = istr('CONNECTION') +CONTENT_DISPOSITION = istr('CONTENT-DISPOSITION') +CONTENT_ENCODING = istr('CONTENT-ENCODING') +CONTENT_LANGUAGE = istr('CONTENT-LANGUAGE') +CONTENT_LENGTH = istr('CONTENT-LENGTH') +CONTENT_LOCATION = istr('CONTENT-LOCATION') +CONTENT_MD5 = istr('CONTENT-MD5') +CONTENT_RANGE = istr('CONTENT-RANGE') +CONTENT_TRANSFER_ENCODING = istr('CONTENT-TRANSFER-ENCODING') +CONTENT_TYPE = istr('CONTENT-TYPE') +COOKIE = istr('COOKIE') +DATE = istr('DATE') +DESTINATION = istr('DESTINATION') +DIGEST = istr('DIGEST') +ETAG = istr('ETAG') +EXPECT = istr('EXPECT') +EXPIRES = istr('EXPIRES') +FROM = istr('FROM') +HOST = istr('HOST') +IF_MATCH = istr('IF-MATCH') +IF_MODIFIED_SINCE = istr('IF-MODIFIED-SINCE') +IF_NONE_MATCH = istr('IF-NONE-MATCH') +IF_RANGE = istr('IF-RANGE') +IF_UNMODIFIED_SINCE = istr('IF-UNMODIFIED-SINCE') +KEEP_ALIVE = 
istr('KEEP-ALIVE') +LAST_EVENT_ID = istr('LAST-EVENT-ID') +LAST_MODIFIED = istr('LAST-MODIFIED') +LINK = istr('LINK') +LOCATION = istr('LOCATION') +MAX_FORWARDS = istr('MAX-FORWARDS') +ORIGIN = istr('ORIGIN') +PRAGMA = istr('PRAGMA') +PROXY_AUTHENTICATE = istr('PROXY_AUTHENTICATE') +PROXY_AUTHORIZATION = istr('PROXY-AUTHORIZATION') +RANGE = istr('RANGE') +REFERER = istr('REFERER') +RETRY_AFTER = istr('RETRY-AFTER') +SEC_WEBSOCKET_ACCEPT = istr('SEC-WEBSOCKET-ACCEPT') +SEC_WEBSOCKET_VERSION = istr('SEC-WEBSOCKET-VERSION') +SEC_WEBSOCKET_PROTOCOL = istr('SEC-WEBSOCKET-PROTOCOL') +SEC_WEBSOCKET_KEY = istr('SEC-WEBSOCKET-KEY') +SEC_WEBSOCKET_KEY1 = istr('SEC-WEBSOCKET-KEY1') +SERVER = istr('SERVER') +SET_COOKIE = istr('SET-COOKIE') +TE = istr('TE') +TRAILER = istr('TRAILER') +TRANSFER_ENCODING = istr('TRANSFER-ENCODING') +UPGRADE = istr('UPGRADE') +WEBSOCKET = istr('WEBSOCKET') +URI = istr('URI') +USER_AGENT = istr('USER-AGENT') +VARY = istr('VARY') +VIA = istr('VIA') +WANT_DIGEST = istr('WANT-DIGEST') +WARNING = istr('WARNING') +WWW_AUTHENTICATE = istr('WWW-AUTHENTICATE') diff --git a/RBXLegacyDiscordBot/lib/aiohttp/helpers.py b/RBXLegacyDiscordBot/lib/aiohttp/helpers.py new file mode 100644 index 0000000..5b3f524 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/helpers.py @@ -0,0 +1,534 @@ +"""Various helper functions""" + +import asyncio +import base64 +import binascii +import datetime +import functools +import io +import os +import re +import warnings +from collections import namedtuple +from pathlib import Path +from urllib.parse import quote, urlencode + +from async_timeout import timeout +from multidict import MultiDict, MultiDictProxy + +from . import hdrs +from .errors import InvalidURL + +try: + from asyncio import ensure_future +except ImportError: + ensure_future = asyncio.async + + +__all__ = ('BasicAuth', 'create_future', 'FormData', 'parse_mimetype', + 'Timeout', 'ensure_future') + + +sentinel = object() +Timeout = timeout + + +class BasicAuth(namedtuple('BasicAuth', ['login', 'password', 'encoding'])): + """Http basic authentication helper. 
+ + :param str login: Login + :param str password: Password + :param str encoding: (optional) encoding ('latin1' by default) + """ + + def __new__(cls, login, password='', encoding='latin1'): + if login is None: + raise ValueError('None is not allowed as login value') + + if password is None: + raise ValueError('None is not allowed as password value') + + return super().__new__(cls, login, password, encoding) + + @classmethod + def decode(cls, auth_header, encoding='latin1'): + """Create a :class:`BasicAuth` object from an ``Authorization`` HTTP + header.""" + split = auth_header.strip().split(' ') + if len(split) == 2: + if split[0].strip().lower() != 'basic': + raise ValueError('Unknown authorization method %s' % split[0]) + to_decode = split[1] + else: + raise ValueError('Could not parse authorization header.') + + try: + username, _, password = base64.b64decode( + to_decode.encode('ascii') + ).decode(encoding).partition(':') + except binascii.Error: + raise ValueError('Invalid base64 encoding.') + + return cls(username, password, encoding=encoding) + + def encode(self): + """Encode credentials.""" + creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding) + return 'Basic %s' % base64.b64encode(creds).decode(self.encoding) + + +def create_future(loop): + """Compatibility wrapper for the loop.create_future() call introduced in + 3.5.2.""" + if hasattr(loop, 'create_future'): + return loop.create_future() + else: + return asyncio.Future(loop=loop) + + +class FormData: + """Helper class for multipart/form-data and + application/x-www-form-urlencoded body generation.""" + + def __init__(self, fields=()): + from . import multipart + self._writer = multipart.MultipartWriter('form-data') + self._fields = [] + self._is_multipart = False + + if isinstance(fields, dict): + fields = list(fields.items()) + elif not isinstance(fields, (list, tuple)): + fields = (fields,) + self.add_fields(*fields) + + @property + def is_multipart(self): + return self._is_multipart + + @property + def content_type(self): + if self._is_multipart: + return self._writer.headers[hdrs.CONTENT_TYPE] + else: + return 'application/x-www-form-urlencoded' + + def add_field(self, name, value, *, content_type=None, filename=None, + content_transfer_encoding=None): + + if isinstance(value, io.IOBase): + self._is_multipart = True + elif isinstance(value, (bytes, bytearray, memoryview)): + if filename is None and content_transfer_encoding is None: + filename = name + + type_options = MultiDict({'name': name}) + if filename is not None and not isinstance(filename, str): + raise TypeError('filename must be an instance of str. ' + 'Got: %s' % filename) + if filename is None and isinstance(value, io.IOBase): + filename = guess_filename(value, name) + if filename is not None: + type_options['filename'] = filename + self._is_multipart = True + + headers = {} + if content_type is not None: + if not isinstance(content_type, str): + raise TypeError('content_type must be an instance of str. ' + 'Got: %s' % content_type) + headers[hdrs.CONTENT_TYPE] = content_type + self._is_multipart = True + if content_transfer_encoding is not None: + if not isinstance(content_transfer_encoding, str): + raise TypeError('content_transfer_encoding must be an instance' + ' of str. 
Got: %s' % content_transfer_encoding) + headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + self._is_multipart = True + + self._fields.append((type_options, headers, value)) + + def add_fields(self, *fields): + to_add = list(fields) + + while to_add: + rec = to_add.pop(0) + + if isinstance(rec, io.IOBase): + k = guess_filename(rec, 'unknown') + self.add_field(k, rec) + + elif isinstance(rec, (MultiDictProxy, MultiDict)): + to_add.extend(rec.items()) + + elif isinstance(rec, (list, tuple)) and len(rec) == 2: + k, fp = rec + self.add_field(k, fp) + + else: + raise TypeError('Only io.IOBase, multidict and (name, file) ' + 'pairs allowed, use .add_field() for passing ' + 'more complex parameters') + + def _gen_form_urlencoded(self, encoding): + # form data (x-www-form-urlencoded) + data = [] + for type_options, _, value in self._fields: + data.append((type_options['name'], value)) + + data = urlencode(data, doseq=True) + return data.encode(encoding) + + def _gen_form_data(self, *args, **kwargs): + """Encode a list of fields using the multipart/form-data MIME format""" + for dispparams, headers, value in self._fields: + part = self._writer.append(value, headers) + if dispparams: + part.set_content_disposition('form-data', **dispparams) + # FIXME cgi.FieldStorage doesn't likes body parts with + # Content-Length which were sent via chunked transfer encoding + part.headers.pop(hdrs.CONTENT_LENGTH, None) + yield from self._writer.serialize() + + def __call__(self, encoding): + if self._is_multipart: + return self._gen_form_data(encoding) + else: + return self._gen_form_urlencoded(encoding) + + +def parse_mimetype(mimetype): + """Parses a MIME type into its components. + + :param str mimetype: MIME type + + :returns: 4 element tuple for MIME type, subtype, suffix and parameters + :rtype: tuple + + Example: + + >>> parse_mimetype('text/html; charset=utf-8') + ('text', 'html', '', {'charset': 'utf-8'}) + + """ + if not mimetype: + return '', '', '', {} + + parts = mimetype.split(';') + params = [] + for item in parts[1:]: + if not item: + continue + key, value = item.split('=', 1) if '=' in item else (item, '') + params.append((key.lower().strip(), value.strip(' "'))) + params = dict(params) + + fulltype = parts[0].strip().lower() + if fulltype == '*': + fulltype = '*/*' + + mtype, stype = fulltype.split('/', 1) \ + if '/' in fulltype else (fulltype, '') + stype, suffix = stype.split('+', 1) if '+' in stype else (stype, '') + + return mtype, stype, suffix, params + + +def guess_filename(obj, default=None): + name = getattr(obj, 'name', None) + if name and name[0] != '<' and name[-1] != '>': + return Path(name).name + return default + + +class AccessLogger: + """Helper object to log access. 
+ + Usage: + log = logging.getLogger("spam") + log_format = "%a %{User-Agent}i" + access_logger = AccessLogger(log, log_format) + access_logger.log(message, environ, response, transport, time) + + Format: + %% The percent sign + %a Remote IP-address (IP-address of proxy if using reverse proxy) + %t Time when the request was started to process + %P The process ID of the child that serviced the request + %r First line of request + %s Response status code + %b Size of response in bytes, excluding HTTP headers + %O Bytes sent, including headers + %T Time taken to serve the request, in seconds + %Tf Time taken to serve the request, in seconds with floating fraction + in .06f format + %D Time taken to serve the request, in microseconds + %{FOO}i request.headers['FOO'] + %{FOO}o response.headers['FOO'] + %{FOO}e os.environ['FOO'] + + """ + + LOG_FORMAT = '%a %l %u %t "%r" %s %b "%{Referrer}i" "%{User-Agent}i"' + FORMAT_RE = re.compile(r'%(\{([A-Za-z\-]+)\}([ioe])|[atPrsbOD]|Tf?)') + CLEANUP_RE = re.compile(r'(%[^s])') + _FORMAT_CACHE = {} + + def __init__(self, logger, log_format=LOG_FORMAT): + """Initialise the logger. + + :param logger: logger object to be used for logging + :param log_format: apache compatible log format + + """ + self.logger = logger + _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) + if not _compiled_format: + _compiled_format = self.compile_format(log_format) + AccessLogger._FORMAT_CACHE[log_format] = _compiled_format + self._log_format, self._methods = _compiled_format + + def compile_format(self, log_format): + """Translate log_format into form usable by modulo formatting + + All known atoms will be replaced with %s + Also methods for formatting of those atoms will be added to + _methods in apropriate order + + For example we have log_format = "%a %t" + This format will be translated to "%s %s" + Also contents of _methods will be + [self._format_a, self._format_t] + These method will be called and results will be passed + to translated string format. 
+ + Each _format_* method receive 'args' which is list of arguments + given to self.log + + Exceptions are _format_e, _format_i and _format_o methods which + also receive key name (by functools.partial) + + """ + + log_format = log_format.replace("%l", "-") + log_format = log_format.replace("%u", "-") + methods = [] + + for atom in self.FORMAT_RE.findall(log_format): + if atom[1] == '': + methods.append(getattr(AccessLogger, '_format_%s' % atom[0])) + else: + m = getattr(AccessLogger, '_format_%s' % atom[2]) + methods.append(functools.partial(m, atom[1])) + log_format = self.FORMAT_RE.sub(r'%s', log_format) + log_format = self.CLEANUP_RE.sub(r'%\1', log_format) + return log_format, methods + + @staticmethod + def _format_e(key, args): + return (args[1] or {}).get(key, '-') + + @staticmethod + def _format_i(key, args): + if not args[0]: + return '(no headers)' + # suboptimal, make istr(key) once + return args[0].headers.get(key, '-') + + @staticmethod + def _format_o(key, args): + # suboptimal, make istr(key) once + return args[2].headers.get(key, '-') + + @staticmethod + def _format_a(args): + if args[3] is None: + return '-' + peername = args[3].get_extra_info('peername') + if isinstance(peername, (list, tuple)): + return peername[0] + else: + return peername + + @staticmethod + def _format_t(args): + return datetime.datetime.utcnow().strftime('[%d/%b/%Y:%H:%M:%S +0000]') + + @staticmethod + def _format_P(args): + return "<%s>" % os.getpid() + + @staticmethod + def _format_r(args): + msg = args[0] + if not msg: + return '-' + return '%s %s HTTP/%s.%s' % tuple((msg.method, + msg.path) + msg.version) + + @staticmethod + def _format_s(args): + return args[2].status + + @staticmethod + def _format_b(args): + return args[2].body_length + + @staticmethod + def _format_O(args): + return args[2].output_length + + @staticmethod + def _format_T(args): + return round(args[4]) + + @staticmethod + def _format_Tf(args): + return '%06f' % args[4] + + @staticmethod + def _format_D(args): + return round(args[4] * 1000000) + + def _format_line(self, args): + return tuple(m(args) for m in self._methods) + + def log(self, message, environ, response, transport, time): + """Log access. + + :param message: Request object. May be None. + :param environ: Environment dict. May be None. + :param response: Response object. + :param transport: Tansport object. May be None + :param float time: Time taken to serve the request. + """ + try: + self.logger.info(self._log_format % self._format_line( + [message, environ, response, transport, time])) + except Exception: + self.logger.exception("Error in logging") + + +class reify: + """Use as a class method decorator. It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. 
+ + """ + + def __init__(self, wrapped): + self.wrapped = wrapped + try: + self.__doc__ = wrapped.__doc__ + except: # pragma: no cover + self.__doc__ = "" + self.name = wrapped.__name__ + + def __get__(self, inst, owner, _sentinel=sentinel): + if inst is None: + return self + val = inst.__dict__.get(self.name, _sentinel) + if val is not _sentinel: + return val + val = self.wrapped(inst) + inst.__dict__[self.name] = val + return val + + def __set__(self, inst, value): + raise AttributeError("reified property is read-only") + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + + "0123456789-._~") + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. + """ + parts = uri.split('%') + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = '%' + parts[i] + else: + parts[i] = '%' + parts[i] + return ''.join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. 
+ return quote(uri, safe=safe_without_percent) + + +_ipv4_pattern = ('^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}' + '(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$') +_ipv6_pattern = ( + '^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}' + '(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)' + '((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})' + '(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}' + '(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}' + '[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)' + '(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}' + ':|:(:[A-F0-9]{1,4}){7})$') +_ipv4_regex = re.compile(_ipv4_pattern) +_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) +_ipv4_regexb = re.compile(_ipv4_pattern.encode('ascii')) +_ipv6_regexb = re.compile(_ipv6_pattern.encode('ascii'), flags=re.IGNORECASE) + + +def is_ip_address(host): + if host is None: + return False + if isinstance(host, str): + if _ipv4_regex.match(host) or _ipv6_regex.match(host): + return True + else: + return False + elif isinstance(host, (bytes, bytearray, memoryview)): + if _ipv4_regexb.match(host) or _ipv6_regexb.match(host): + return True + else: + return False + else: + raise TypeError("{} [{}] is not a str or bytes" + .format(host, type(host))) + + +def _get_kwarg(kwargs, old, new, value): + val = kwargs.pop(old, sentinel) + if val is not sentinel: + warnings.warn("{} is deprecated, use {} instead".format(old, new), + DeprecationWarning, + stacklevel=3) + return val + else: + return value diff --git a/RBXLegacyDiscordBot/lib/aiohttp/log.py b/RBXLegacyDiscordBot/lib/aiohttp/log.py new file mode 100644 index 0000000..cfda0e5 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/log.py @@ -0,0 +1,8 @@ +import logging + +access_logger = logging.getLogger('aiohttp.access') +client_logger = logging.getLogger('aiohttp.client') +internal_logger = logging.getLogger('aiohttp.internal') +server_logger = logging.getLogger('aiohttp.server') +web_logger = logging.getLogger('aiohttp.web') +ws_logger = logging.getLogger('aiohttp.websocket') diff --git a/RBXLegacyDiscordBot/lib/aiohttp/multipart.py b/RBXLegacyDiscordBot/lib/aiohttp/multipart.py new file mode 100644 index 0000000..093c856 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/multipart.py @@ -0,0 +1,973 @@ +import asyncio +import base64 +import binascii +import io +import json +import mimetypes +import os +import re +import sys +import uuid +import warnings +import zlib +from collections import Mapping, Sequence, deque +from pathlib import Path +from urllib.parse import parse_qsl, quote, unquote, urlencode + +from multidict import CIMultiDict + +from .hdrs import (CONTENT_DISPOSITION, CONTENT_ENCODING, CONTENT_LENGTH, + CONTENT_TRANSFER_ENCODING, CONTENT_TYPE) +from .helpers import parse_mimetype +from .protocol import HttpParser + +__all__ = ('MultipartReader', 'MultipartWriter', + 'BodyPartReader', 'BodyPartWriter', + 'BadContentDispositionHeader', 'BadContentDispositionParam', + 'parse_content_disposition', 'content_disposition_filename') + + +CHAR = set(chr(i) for i in range(0, 128)) +CTL = set(chr(i) for i in range(0, 32)) | {chr(127), } +SEPARATORS = {'(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']', + '?', '=', '{', '}', ' ', chr(9)} +TOKEN = CHAR ^ CTL ^ SEPARATORS + +PY_35 = sys.version_info >= (3, 5) +PY_352 = sys.version_info >= (3, 5, 2) + + +class BadContentDispositionHeader(RuntimeWarning): + pass + + +class 
BadContentDispositionParam(RuntimeWarning): + pass + + +def parse_content_disposition(header): + def is_token(string): + return string and TOKEN >= set(string) + + def is_quoted(string): + return string[0] == string[-1] == '"' + + def is_rfc5987(string): + return is_token(string) and string.count("'") == 2 + + def is_extended_param(string): + return string.endswith('*') + + def is_continuous_param(string): + pos = string.find('*') + 1 + if not pos: + return False + substring = string[pos:-1] if string.endswith('*') else string[pos:] + return substring.isdigit() + + def unescape(text, *, chars=''.join(map(re.escape, CHAR))): + return re.sub('\\\\([{}])'.format(chars), '\\1', text) + + if not header: + return None, {} + + disptype, *parts = header.split(';') + if not is_token(disptype): + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params = {} + for item in parts: + if '=' not in item: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + key, value = item.split('=', 1) + key = key.lower().strip() + value = value.lstrip() + + if key in params: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + if not is_token(key): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_continuous_param(key): + if is_quoted(value): + value = unescape(value[1:-1]) + elif not is_token(value): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_extended_param(key): + if is_rfc5987(value): + encoding, _, value = value.split("'", 2) + encoding = encoding or 'utf-8' + else: + warnings.warn(BadContentDispositionParam(item)) + continue + + try: + value = unquote(value, encoding, 'strict') + except UnicodeDecodeError: # pragma: nocover + warnings.warn(BadContentDispositionParam(item)) + continue + + else: + if is_quoted(value): + value = unescape(value[1:-1].lstrip('\\/')) + elif not is_token(value): + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params[key] = value + + return disptype.lower(), params + + +def content_disposition_filename(params): + if not params: + return None + elif 'filename*' in params: + return params['filename*'] + elif 'filename' in params: + return params['filename'] + else: + parts = [] + fnparams = sorted((key, value) + for key, value in params.items() + if key.startswith('filename*')) + for num, (key, value) in enumerate(fnparams): + _, tail = key.split('*', 1) + if tail.endswith('*'): + tail = tail[:-1] + if tail == str(num): + parts.append(value) + else: + break + if not parts: + return None + value = ''.join(parts) + if "'" in value: + encoding, _, value = value.split("'", 2) + encoding = encoding or 'utf-8' + return unquote(value, encoding, 'strict') + return value + + +class MultipartResponseWrapper(object): + """Wrapper around the :class:`MultipartBodyReader` to take care about + underlying connection and close it when it needs in.""" + + def __init__(self, resp, stream): + self.resp = resp + self.stream = stream + + if PY_35: + def __aiter__(self): + return self + + if not PY_352: # pragma: no cover + __aiter__ = asyncio.coroutine(__aiter__) + + @asyncio.coroutine + def __anext__(self): + part = yield from self.next() + if part is None: + raise StopAsyncIteration # NOQA + return part + + def at_eof(self): + """Returns ``True`` when all response data had been read. 
+ + :rtype: bool + """ + return self.resp.content.at_eof() + + @asyncio.coroutine + def next(self): + """Emits next multipart reader object.""" + item = yield from self.stream.next() + if self.stream.at_eof(): + yield from self.release() + return item + + @asyncio.coroutine + def release(self): + """Releases the connection gracefully, reading all the content + to the void.""" + yield from self.resp.release() + + +class BodyPartReader(object): + """Multipart reader for single body part.""" + + chunk_size = 8192 + + def __init__(self, boundary, headers, content): + self.headers = headers + self._boundary = boundary + self._content = content + self._at_eof = False + length = self.headers.get(CONTENT_LENGTH, None) + self._length = int(length) if length is not None else None + self._read_bytes = 0 + self._unread = deque() + self._prev_chunk = None + self._content_eof = 0 + + if PY_35: + def __aiter__(self): + return self + + if not PY_352: # pragma: no cover + __aiter__ = asyncio.coroutine(__aiter__) + + @asyncio.coroutine + def __anext__(self): + part = yield from self.next() + if part is None: + raise StopAsyncIteration # NOQA + return part + + @asyncio.coroutine + def next(self): + item = yield from self.read() + if not item: + return None + return item + + @asyncio.coroutine + def read(self, *, decode=False): + """Reads body part data. + + :param bool decode: Decodes data following by encoding + method from `Content-Encoding` header. If it missed + data remains untouched + + :rtype: bytearray + """ + if self._at_eof: + return b'' + data = bytearray() + if self._length is None: + while not self._at_eof: + data.extend((yield from self.readline())) + else: + while not self._at_eof: + data.extend((yield from self.read_chunk(self.chunk_size))) + if decode: + return self.decode(data) + return data + + @asyncio.coroutine + def read_chunk(self, size=chunk_size): + """Reads body part content chunk of the specified size. + + :param int size: chunk size + + :rtype: bytearray + """ + if self._at_eof: + return b'' + if self._length: + chunk = yield from self._read_chunk_from_length(size) + else: + chunk = yield from self._read_chunk_from_stream(size) + + self._read_bytes += len(chunk) + if self._read_bytes == self._length: + self._at_eof = True + if self._at_eof: + assert b'\r\n' == (yield from self._content.readline()), \ + 'reader did not read all the data or it is malformed' + return chunk + + @asyncio.coroutine + def _read_chunk_from_length(self, size): + """Reads body part content chunk of the specified size. + The body part must has `Content-Length` header with proper value. + + :param int size: chunk size + + :rtype: bytearray + """ + assert self._length is not None, \ + 'Content-Length required for chunked read' + chunk_size = min(size, self._length - self._read_bytes) + chunk = yield from self._content.read(chunk_size) + return chunk + + @asyncio.coroutine + def _read_chunk_from_stream(self, size): + """Reads content chunk of body part with unknown length. + The `Content-Length` header for body part is not necessary. 
+ + :param int size: chunk size + + :rtype: bytearray + """ + assert size >= len(self._boundary) + 2, \ + 'Chunk size must be greater or equal than boundary length + 2' + first_chunk = self._prev_chunk is None + if first_chunk: + self._prev_chunk = yield from self._content.read(size) + + chunk = yield from self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + window = self._prev_chunk + chunk + sub = b'\r\n' + self._boundary + if first_chunk: + idx = window.find(sub) + else: + idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) + if idx >= 0: + # pushing boundary back to content + self._content.unread_data(window[idx:]) + if size > idx: + self._prev_chunk = self._prev_chunk[:idx] + chunk = window[len(self._prev_chunk):idx] + if not chunk: + self._at_eof = True + if 0 < len(chunk) < len(sub) and not self._content_eof: + self._prev_chunk += chunk + self._at_eof = False + return b'' + result = self._prev_chunk + self._prev_chunk = chunk + return result + + @asyncio.coroutine + def readline(self): + """Reads body part by line by line. + + :rtype: bytearray + """ + if self._at_eof: + return b'' + + if self._unread: + line = self._unread.popleft() + else: + line = yield from self._content.readline() + + if line.startswith(self._boundary): + # the very last boundary may not come with \r\n, + # so set single rules for everyone + sline = line.rstrip(b'\r\n') + boundary = self._boundary + last_boundary = self._boundary + b'--' + # ensure that we read exactly the boundary, not something alike + if sline == boundary or sline == last_boundary: + self._at_eof = True + self._unread.append(line) + return b'' + else: + next_line = yield from self._content.readline() + if next_line.startswith(self._boundary): + line = line[:-2] # strip CRLF but only once + self._unread.append(next_line) + + return line + + @asyncio.coroutine + def release(self): + """Like :meth:`read`, but reads all the data to the void. + + :rtype: None + """ + if self._at_eof: + return + if self._length is None: + while not self._at_eof: + yield from self.readline() + else: + while not self._at_eof: + yield from self.read_chunk(self.chunk_size) + + @asyncio.coroutine + def text(self, *, encoding=None): + """Like :meth:`read`, but assumes that body part contains text data. + + :param str encoding: Custom text encoding. Overrides specified + in charset param of `Content-Type` header + + :rtype: str + """ + data = yield from self.read(decode=True) + encoding = encoding or self.get_charset(default='latin1') + return data.decode(encoding) + + @asyncio.coroutine + def json(self, *, encoding=None): + """Like :meth:`read`, but assumes that body parts contains JSON data. + + :param str encoding: Custom JSON encoding. Overrides specified + in charset param of `Content-Type` header + """ + data = yield from self.read(decode=True) + if not data: + return None + encoding = encoding or self.get_charset(default='utf-8') + return json.loads(data.decode(encoding)) + + @asyncio.coroutine + def form(self, *, encoding=None): + """Like :meth:`read`, but assumes that body parts contains form + urlencoded data. + + :param str encoding: Custom form encoding. 
Overrides specified + in charset param of `Content-Type` header + """ + data = yield from self.read(decode=True) + if not data: + return None + encoding = encoding or self.get_charset(default='utf-8') + return parse_qsl(data.rstrip().decode(encoding), encoding=encoding) + + def at_eof(self): + """Returns ``True`` if the boundary was reached or + ``False`` otherwise. + + :rtype: bool + """ + return self._at_eof + + def decode(self, data): + """Decodes data according the specified `Content-Encoding` + or `Content-Transfer-Encoding` headers value. + + Supports ``gzip``, ``deflate`` and ``identity`` encodings for + `Content-Encoding` header. + + Supports ``base64``, ``quoted-printable``, ``binary`` encodings for + `Content-Transfer-Encoding` header. + + :param bytearray data: Data to decode. + + :raises: :exc:`RuntimeError` - if encoding is unknown. + + :rtype: bytes + """ + if CONTENT_TRANSFER_ENCODING in self.headers: + data = self._decode_content_transfer(data) + if CONTENT_ENCODING in self.headers: + return self._decode_content(data) + return data + + def _decode_content(self, data): + encoding = self.headers[CONTENT_ENCODING].lower() + + if encoding == 'deflate': + return zlib.decompress(data, -zlib.MAX_WBITS) + elif encoding == 'gzip': + return zlib.decompress(data, 16 + zlib.MAX_WBITS) + elif encoding == 'identity': + return data + else: + raise RuntimeError('unknown content encoding: {}'.format(encoding)) + + def _decode_content_transfer(self, data): + encoding = self.headers[CONTENT_TRANSFER_ENCODING].lower() + + if encoding == 'base64': + return base64.b64decode(data) + elif encoding == 'quoted-printable': + return binascii.a2b_qp(data) + elif encoding == 'binary': + return data + else: + raise RuntimeError('unknown content transfer encoding: {}' + ''.format(encoding)) + + def get_charset(self, default=None): + """Returns charset parameter from ``Content-Type`` header or default. + """ + ctype = self.headers.get(CONTENT_TYPE, '') + *_, params = parse_mimetype(ctype) + return params.get('charset', default) + + @property + def filename(self): + """Returns filename specified in Content-Disposition header or ``None`` + if missed or header is malformed.""" + _, params = parse_content_disposition( + self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params) + + +class MultipartReader(object): + """Multipart body reader.""" + + #: Response wrapper, used when multipart readers constructs from response. + response_wrapper_cls = MultipartResponseWrapper + #: Multipart reader class, used to handle multipart/* body parts. + #: None points to type(self) + multipart_reader_cls = None + #: Body part reader class for non multipart/* content types. + part_reader_cls = BodyPartReader + + def __init__(self, headers, content): + self.headers = headers + self._boundary = ('--' + self._get_boundary()).encode() + self._content = content + self._last_part = None + self._at_eof = False + self._at_bof = True + self._unread = [] + + if PY_35: + def __aiter__(self): + return self + + if not PY_352: # pragma: no cover + __aiter__ = asyncio.coroutine(__aiter__) + + @asyncio.coroutine + def __anext__(self): + part = yield from self.next() + if part is None: + raise StopAsyncIteration # NOQA + return part + + @classmethod + def from_response(cls, response): + """Constructs reader instance from HTTP response. 
+ + :param response: :class:`~aiohttp.client.ClientResponse` instance + """ + obj = cls.response_wrapper_cls(response, cls(response.headers, + response.content)) + return obj + + def at_eof(self): + """Returns ``True`` if the final boundary was reached or + ``False`` otherwise. + + :rtype: bool + """ + return self._at_eof + + @asyncio.coroutine + def next(self): + """Emits the next multipart body part.""" + # So, if we're at BOF, we need to skip till the boundary. + if self._at_eof: + return + yield from self._maybe_release_last_part() + if self._at_bof: + yield from self._read_until_first_boundary() + self._at_bof = False + else: + yield from self._read_boundary() + if self._at_eof: # we just read the last boundary, nothing to do there + return + self._last_part = yield from self.fetch_next_part() + return self._last_part + + @asyncio.coroutine + def release(self): + """Reads all the body parts to the void till the final boundary.""" + while not self._at_eof: + item = yield from self.next() + if item is None: + break + yield from item.release() + + @asyncio.coroutine + def fetch_next_part(self): + """Returns the next body part reader.""" + headers = yield from self._read_headers() + return self._get_part_reader(headers) + + def _get_part_reader(self, headers): + """Dispatches the response by the `Content-Type` header, returning + suitable reader instance. + + :param dict headers: Response headers + """ + ctype = headers.get(CONTENT_TYPE, '') + mtype, *_ = parse_mimetype(ctype) + if mtype == 'multipart': + if self.multipart_reader_cls is None: + return type(self)(headers, self._content) + return self.multipart_reader_cls(headers, self._content) + else: + return self.part_reader_cls(self._boundary, headers, self._content) + + def _get_boundary(self): + mtype, *_, params = parse_mimetype(self.headers[CONTENT_TYPE]) + + assert mtype == 'multipart', 'multipart/* content type expected' + + if 'boundary' not in params: + raise ValueError('boundary missed for Content-Type: %s' + % self.headers[CONTENT_TYPE]) + + boundary = params['boundary'] + if len(boundary) > 70: + raise ValueError('boundary %r is too long (70 chars max)' + % boundary) + + return boundary + + @asyncio.coroutine + def _readline(self): + if self._unread: + return self._unread.pop() + return (yield from self._content.readline()) + + @asyncio.coroutine + def _read_until_first_boundary(self): + while True: + chunk = yield from self._readline() + if chunk == b'': + raise ValueError("Could not find starting boundary %r" + % (self._boundary)) + chunk = chunk.rstrip() + if chunk == self._boundary: + return + elif chunk == self._boundary + b'--': + self._at_eof = True + return + + @asyncio.coroutine + def _read_boundary(self): + chunk = (yield from self._readline()).rstrip() + if chunk == self._boundary: + pass + elif chunk == self._boundary + b'--': + self._at_eof = True + else: + raise ValueError('Invalid boundary %r, expected %r' + % (chunk, self._boundary)) + + @asyncio.coroutine + def _read_headers(self): + lines = [b''] + while True: + chunk = yield from self._content.readline() + chunk = chunk.strip() + lines.append(chunk) + if not chunk: + break + parser = HttpParser() + headers, *_ = parser.parse_headers(lines) + return headers + + @asyncio.coroutine + def _maybe_release_last_part(self): + """Ensures that the last read body part is read completely.""" + if self._last_part is not None: + if not self._last_part.at_eof(): + yield from self._last_part.release() + self._unread.extend(self._last_part._unread) + self._last_part = None 
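+
+# A minimal usage sketch for MultipartReader (illustrative only; the URL is
+# hypothetical and the code assumes it runs inside a coroutine with an open
+# ClientSession):
+#
+#     resp = yield from session.get('http://example.com/multipart')
+#     reader = MultipartReader.from_response(resp)
+#     while True:
+#         part = yield from reader.next()
+#         if part is None:
+#             break
+#         if part.filename:
+#             body = yield from part.read(decode=True)
+#         else:
+#             text = yield from part.text()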
+ + +class BodyPartWriter(object): + """Multipart writer for single body part.""" + + def __init__(self, obj, headers=None, *, chunk_size=8192): + if headers is None: + headers = CIMultiDict() + elif not isinstance(headers, CIMultiDict): + headers = CIMultiDict(headers) + + self.obj = obj + self.headers = headers + self._chunk_size = chunk_size + self._fill_headers_with_defaults() + + self._serialize_map = { + bytes: self._serialize_bytes, + str: self._serialize_str, + io.IOBase: self._serialize_io, + MultipartWriter: self._serialize_multipart, + ('application', 'json'): self._serialize_json, + ('application', 'x-www-form-urlencoded'): self._serialize_form + } + + def _fill_headers_with_defaults(self): + if CONTENT_TYPE not in self.headers: + content_type = self._guess_content_type(self.obj) + if content_type is not None: + self.headers[CONTENT_TYPE] = content_type + + if CONTENT_LENGTH not in self.headers: + content_length = self._guess_content_length(self.obj) + if content_length is not None: + self.headers[CONTENT_LENGTH] = str(content_length) + + if CONTENT_DISPOSITION not in self.headers: + filename = self._guess_filename(self.obj) + if filename is not None: + self.set_content_disposition('attachment', filename=filename) + + def _guess_content_length(self, obj): + if isinstance(obj, bytes): + return len(obj) + elif isinstance(obj, str): + *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE)) + charset = params.get('charset', 'us-ascii') + return len(obj.encode(charset)) + elif isinstance(obj, io.StringIO): + *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE)) + charset = params.get('charset', 'us-ascii') + return len(obj.getvalue().encode(charset)) - obj.tell() + elif isinstance(obj, io.BytesIO): + return len(obj.getvalue()) - obj.tell() + elif isinstance(obj, io.IOBase): + try: + return os.fstat(obj.fileno()).st_size - obj.tell() + except (AttributeError, OSError): + return None + else: + return None + + def _guess_content_type(self, obj, default='application/octet-stream'): + if hasattr(obj, 'name'): + name = getattr(obj, 'name') + return mimetypes.guess_type(name)[0] + elif isinstance(obj, (str, io.StringIO)): + return 'text/plain; charset=utf-8' + else: + return default + + def _guess_filename(self, obj): + if isinstance(obj, io.IOBase): + name = getattr(obj, 'name', None) + if name is not None: + return Path(name).name + + def serialize(self): + """Yields byte chunks for body part.""" + + has_encoding = ( + CONTENT_ENCODING in self.headers and + self.headers[CONTENT_ENCODING] != 'identity' or + CONTENT_TRANSFER_ENCODING in self.headers + ) + if has_encoding: + # since we're following streaming approach which doesn't assumes + # any intermediate buffers, we cannot calculate real content length + # with the specified content encoding scheme. So, instead of lying + # about content length and cause reading issues, we have to strip + # this information. 
+ self.headers.pop(CONTENT_LENGTH, None) + + if self.headers: + yield b'\r\n'.join( + b': '.join(map(lambda i: i.encode('latin1'), item)) + for item in self.headers.items() + ) + yield b'\r\n\r\n' + yield from self._maybe_encode_stream(self._serialize_obj()) + yield b'\r\n' + + def _serialize_obj(self): + obj = self.obj + mtype, stype, *_ = parse_mimetype(self.headers.get(CONTENT_TYPE)) + serializer = self._serialize_map.get((mtype, stype)) + if serializer is not None: + return serializer(obj) + + for key in self._serialize_map: + if not isinstance(key, tuple) and isinstance(obj, key): + return self._serialize_map[key](obj) + return self._serialize_default(obj) + + def _serialize_bytes(self, obj): + yield obj + + def _serialize_str(self, obj): + *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE)) + yield obj.encode(params.get('charset', 'us-ascii')) + + def _serialize_io(self, obj): + while True: + chunk = obj.read(self._chunk_size) + if not chunk: + break + if isinstance(chunk, str): + yield from self._serialize_str(chunk) + else: + yield from self._serialize_bytes(chunk) + + def _serialize_multipart(self, obj): + yield from obj.serialize() + + def _serialize_json(self, obj): + *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE)) + yield json.dumps(obj).encode(params.get('charset', 'utf-8')) + + def _serialize_form(self, obj): + if isinstance(obj, Mapping): + obj = list(obj.items()) + return self._serialize_str(urlencode(obj, doseq=True)) + + def _serialize_default(self, obj): + raise TypeError('unknown body part type %r' % type(obj)) + + def _maybe_encode_stream(self, stream): + if CONTENT_ENCODING in self.headers: + stream = self._apply_content_encoding(stream) + if CONTENT_TRANSFER_ENCODING in self.headers: + stream = self._apply_content_transfer_encoding(stream) + yield from stream + + def _apply_content_encoding(self, stream): + encoding = self.headers[CONTENT_ENCODING].lower() + if encoding == 'identity': + yield from stream + elif encoding in ('deflate', 'gzip'): + if encoding == 'gzip': + zlib_mode = 16 + zlib.MAX_WBITS + else: + zlib_mode = -zlib.MAX_WBITS + zcomp = zlib.compressobj(wbits=zlib_mode) + for chunk in stream: + yield zcomp.compress(chunk) + else: + yield zcomp.flush() + else: + raise RuntimeError('unknown content encoding: {}' + ''.format(encoding)) + + def _apply_content_transfer_encoding(self, stream): + encoding = self.headers[CONTENT_TRANSFER_ENCODING].lower() + if encoding == 'base64': + buffer = bytearray() + while True: + if buffer: + div, mod = divmod(len(buffer), 3) + chunk, buffer = buffer[:div * 3], buffer[div * 3:] + if chunk: + yield base64.b64encode(chunk) + chunk = next(stream, None) + if not chunk: + if buffer: + yield base64.b64encode(buffer[:]) + return + buffer.extend(chunk) + elif encoding == 'quoted-printable': + for chunk in stream: + yield binascii.b2a_qp(chunk) + elif encoding == 'binary': + yield from stream + else: + raise RuntimeError('unknown content transfer encoding: {}' + ''.format(encoding)) + + def set_content_disposition(self, disptype, **params): + """Sets ``Content-Disposition`` header. + + :param str disptype: Disposition type: inline, attachment, form-data. 
+ Should be valid extension token (see RFC 2183) + :param dict params: Disposition params + """ + if not disptype or not (TOKEN > set(disptype)): + raise ValueError('bad content disposition type {!r}' + ''.format(disptype)) + value = disptype + if params: + lparams = [] + for key, val in params.items(): + if not key or not (TOKEN > set(key)): + raise ValueError('bad content disposition parameter' + ' {!r}={!r}'.format(key, val)) + qval = quote(val, '') + lparams.append((key, '"%s"' % qval)) + if key == 'filename': + lparams.append(('filename*', "utf-8''" + qval)) + sparams = '; '.join('='.join(pair) for pair in lparams) + value = '; '.join((value, sparams)) + self.headers[CONTENT_DISPOSITION] = value + + @property + def filename(self): + """Returns filename specified in Content-Disposition header or ``None`` + if missed.""" + _, params = parse_content_disposition( + self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params) + + +class MultipartWriter(object): + """Multipart body writer.""" + + #: Body part reader class for non multipart/* content types. + part_writer_cls = BodyPartWriter + + def __init__(self, subtype='mixed', boundary=None): + boundary = boundary if boundary is not None else uuid.uuid4().hex + try: + boundary.encode('us-ascii') + except UnicodeEncodeError: + raise ValueError('boundary should contains ASCII only chars') + self.headers = CIMultiDict() + self.headers[CONTENT_TYPE] = 'multipart/{}; boundary="{}"'.format( + subtype, boundary + ) + self.parts = [] + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + def __iter__(self): + return iter(self.parts) + + def __len__(self): + return len(self.parts) + + @property + def boundary(self): + *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE)) + return params['boundary'].encode('us-ascii') + + def append(self, obj, headers=None): + """Adds a new body part to multipart writer.""" + if isinstance(obj, self.part_writer_cls): + if headers: + obj.headers.update(headers) + self.parts.append(obj) + else: + if not headers: + headers = CIMultiDict() + self.parts.append(self.part_writer_cls(obj, headers)) + return self.parts[-1] + + def append_json(self, obj, headers=None): + """Helper to append JSON part.""" + if not headers: + headers = CIMultiDict() + headers[CONTENT_TYPE] = 'application/json' + return self.append(obj, headers) + + def append_form(self, obj, headers=None): + """Helper to append form urlencoded part.""" + if not headers: + headers = CIMultiDict() + headers[CONTENT_TYPE] = 'application/x-www-form-urlencoded' + assert isinstance(obj, (Sequence, Mapping)) + return self.append(obj, headers) + + def serialize(self): + """Yields multipart byte chunks.""" + if not self.parts: + yield b'' + return + + for part in self.parts: + yield b'--' + self.boundary + b'\r\n' + yield from part.serialize() + else: + yield b'--' + self.boundary + b'--\r\n' + + yield b'' diff --git a/RBXLegacyDiscordBot/lib/aiohttp/parsers.py b/RBXLegacyDiscordBot/lib/aiohttp/parsers.py new file mode 100644 index 0000000..3168cda --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/parsers.py @@ -0,0 +1,495 @@ +"""Parser is a generator function (NOT coroutine). + +Parser receives data with generator's send() method and sends data to +destination DataQueue. Parser receives ParserBuffer and DataQueue objects +as a parameters of the parser call, all subsequent send() calls should +send bytes objects. 
Parser sends parsed `term` to destination buffer with +DataQueue.feed_data() method. DataQueue object should implement two methods. +feed_data() - parser uses this method to send parsed protocol data. +feed_eof() - parser uses this method for indication of end of parsing stream. +To indicate end of incoming data stream EofStream exception should be sent +into parser. Parser could throw exceptions. + +There are three stages: + + * Data flow chain: + + 1. Application creates StreamParser object for storing incoming data. + 2. StreamParser creates ParserBuffer as internal data buffer. + 3. Application create parser and set it into stream buffer: + + parser = HttpRequestParser() + data_queue = stream.set_parser(parser) + + 3. At this stage StreamParser creates DataQueue object and passes it + and internal buffer into parser as an arguments. + + def set_parser(self, parser): + output = DataQueue() + self.p = parser(output, self._input) + return output + + 4. Application waits data on output.read() + + while True: + msg = yield from output.read() + ... + + * Data flow: + + 1. asyncio's transport reads data from socket and sends data to protocol + with data_received() call. + 2. Protocol sends data to StreamParser with feed_data() call. + 3. StreamParser sends data into parser with generator's send() method. + 4. Parser processes incoming data and sends parsed data + to DataQueue with feed_data() + 5. Application received parsed data from DataQueue.read() + + * Eof: + + 1. StreamParser receives eof with feed_eof() call. + 2. StreamParser throws EofStream exception into parser. + 3. Then it unsets parser. + +_SocketSocketTransport -> + -> "protocol" -> StreamParser -> "parser" -> DataQueue <- "application" + +""" + +import asyncio +import asyncio.streams +import inspect +import socket + +from . import errors +from .streams import EofStream, FlowControlDataQueue + +__all__ = ('EofStream', 'StreamParser', 'StreamProtocol', + 'ParserBuffer', 'StreamWriter') + +DEFAULT_LIMIT = 2 ** 16 + +if hasattr(socket, 'TCP_CORK'): # pragma: no cover + CORK = socket.TCP_CORK +elif hasattr(socket, 'TCP_NOPUSH'): # pragma: no cover + CORK = socket.TCP_NOPUSH +else: # pragma: no cover + CORK = None + + +class StreamParser: + """StreamParser manages incoming bytes stream and protocol parsers. + + StreamParser uses ParserBuffer as internal buffer. + + set_parser() sets current parser, it creates DataQueue object + and sends ParserBuffer and DataQueue into parser generator. + + unset_parser() sends EofStream into parser and then removes it. 
+ """ + + def __init__(self, *, loop=None, buf=None, + limit=DEFAULT_LIMIT, eof_exc_class=RuntimeError, **kwargs): + self._loop = loop + self._eof = False + self._exception = None + self._parser = None + self._output = None + self._limit = limit + self._eof_exc_class = eof_exc_class + self._buffer = buf if buf is not None else ParserBuffer() + + self.paused = False + self.transport = None + + @property + def output(self): + return self._output + + def set_transport(self, transport): + assert transport is None or self.transport is None, \ + 'Transport already set' + self.transport = transport + + def at_eof(self): + return self._eof + + def exception(self): + return self._exception + + def set_exception(self, exc): + if isinstance(exc, ConnectionError): + exc, old_exc = self._eof_exc_class(), exc + exc.__cause__ = old_exc + exc.__context__ = old_exc + + self._exception = exc + + if self._output is not None: + self._output.set_exception(exc) + self._output = None + self._parser = None + + def feed_data(self, data): + """send data to current parser or store in buffer.""" + if data is None: + return + + if self._parser: + try: + self._parser.send(data) + except StopIteration: + self._output.feed_eof() + self._output = None + self._parser = None + except Exception as exc: + self._output.set_exception(exc) + self._output = None + self._parser = None + else: + self._buffer.feed_data(data) + + def feed_eof(self): + """send eof to all parsers, recursively.""" + if self._parser: + try: + if self._buffer: + self._parser.send(b'') + self._parser.throw(EofStream()) + except StopIteration: + self._output.feed_eof() + except EofStream: + self._output.set_exception(self._eof_exc_class()) + except Exception as exc: + self._output.set_exception(exc) + + self._parser = None + self._output = None + + self._eof = True + + def set_parser(self, parser, output=None): + """set parser to stream. 
return parser's DataQueue.""" + if self._parser: + self.unset_parser() + + if output is None: + output = FlowControlDataQueue( + self, limit=self._limit, loop=self._loop) + + if self._exception: + output.set_exception(self._exception) + return output + + # init parser + p = parser(output, self._buffer) + assert inspect.isgenerator(p), 'Generator is required' + + try: + # initialize parser with data and parser buffers + next(p) + except StopIteration: + pass + except Exception as exc: + output.set_exception(exc) + else: + # parser still require more data + self._parser = p + self._output = output + + if self._eof: + self.unset_parser() + + return output + + def unset_parser(self): + """unset parser, send eof to the parser and then remove it.""" + if self._parser is None: + return + + # TODO: write test + if self._loop.is_closed(): + # TODO: log something + return + + try: + self._parser.throw(EofStream()) + except StopIteration: + self._output.feed_eof() + except EofStream: + self._output.set_exception(self._eof_exc_class()) + except Exception as exc: + self._output.set_exception(exc) + finally: + self._output = None + self._parser = None + + +class StreamWriter(asyncio.streams.StreamWriter): + + def __init__(self, transport, protocol, reader, loop): + self._transport = transport + self._protocol = protocol + self._reader = reader + self._loop = loop + self._tcp_nodelay = False + self._tcp_cork = False + self._socket = transport.get_extra_info('socket') + + @property + def tcp_nodelay(self): + return self._tcp_nodelay + + def set_tcp_nodelay(self, value): + value = bool(value) + if self._tcp_nodelay == value: + return + self._tcp_nodelay = value + if self._socket is None: + return + if self._socket.family not in (socket.AF_INET, socket.AF_INET6): + return + if self._tcp_cork: + self._tcp_cork = False + if CORK is not None: # pragma: no branch + self._socket.setsockopt(socket.IPPROTO_TCP, CORK, False) + self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value) + + @property + def tcp_cork(self): + return self._tcp_cork + + def set_tcp_cork(self, value): + value = bool(value) + if self._tcp_cork == value: + return + self._tcp_cork = value + if self._socket is None: + return + if self._socket.family not in (socket.AF_INET, socket.AF_INET6): + return + if self._tcp_nodelay: + self._socket.setsockopt(socket.IPPROTO_TCP, + socket.TCP_NODELAY, + False) + self._tcp_nodelay = False + if CORK is not None: # pragma: no branch + self._socket.setsockopt(socket.IPPROTO_TCP, CORK, value) + + +class StreamProtocol(asyncio.streams.FlowControlMixin, asyncio.Protocol): + """Helper class to adapt between Protocol and StreamReader.""" + + def __init__(self, *, loop=None, disconnect_error=RuntimeError, **kwargs): + super().__init__(loop=loop) + + self.transport = None + self.writer = None + self.reader = StreamParser( + loop=loop, eof_exc_class=disconnect_error, **kwargs) + + def is_connected(self): + return self.transport is not None + + def connection_made(self, transport): + self.transport = transport + self.reader.set_transport(transport) + self.writer = StreamWriter(transport, self, self.reader, self._loop) + + def connection_lost(self, exc): + self.transport = self.writer = None + self.reader.set_transport(None) + + if exc is None: + self.reader.feed_eof() + else: + self.reader.set_exception(exc) + + super().connection_lost(exc) + + def data_received(self, data): + self.reader.feed_data(data) + + def eof_received(self): + self.reader.feed_eof() + + +class _ParserBufferHelper: + + __slots__ = 
('exception', 'data') + + def __init__(self, exception, data): + self.exception = exception + self.data = data + + +class ParserBuffer: + """ParserBuffer is NOT a bytearray extension anymore. + + ParserBuffer provides helper methods for parsers. + """ + __slots__ = ('_helper', '_writer', '_data') + + def __init__(self, *args): + self._data = bytearray(*args) + self._helper = _ParserBufferHelper(None, self._data) + self._writer = self._feed_data(self._helper) + next(self._writer) + + def exception(self): + return self._helper.exception + + def set_exception(self, exc): + self._helper.exception = exc + + @staticmethod + def _feed_data(helper): + while True: + chunk = yield + if chunk: + helper.data.extend(chunk) + + if helper.exception: + raise helper.exception + + def feed_data(self, data): + if not self._helper.exception: + self._writer.send(data) + + def read(self, size): + """read() reads specified amount of bytes.""" + + while True: + if self._helper.exception: + raise self._helper.exception + + if len(self._data) >= size: + data = self._data[:size] + del self._data[:size] + return data + + self._writer.send((yield)) + + def readsome(self, size=None): + """reads size of less amount of bytes.""" + + while True: + if self._helper.exception: + raise self._helper.exception + + length = len(self._data) + if length > 0: + if size is None or length < size: + size = length + + data = self._data[:size] + del self._data[:size] + return data + + self._writer.send((yield)) + + def readuntil(self, stop, limit=None): + assert isinstance(stop, bytes) and stop, \ + 'bytes is required: {!r}'.format(stop) + + stop_len = len(stop) + + while True: + if self._helper.exception: + raise self._helper.exception + + pos = self._data.find(stop) + if pos >= 0: + end = pos + stop_len + size = end + if limit is not None and size > limit: + raise errors.LineLimitExceededParserError( + 'Line is too long.', limit) + + data = self._data[:size] + del self._data[:size] + return data + else: + if limit is not None and len(self._data) > limit: + raise errors.LineLimitExceededParserError( + 'Line is too long.', limit) + + self._writer.send((yield)) + + def wait(self, size): + """wait() waits for specified amount of bytes + then returns data without changing internal buffer.""" + + while True: + if self._helper.exception: + raise self._helper.exception + + if len(self._data) >= size: + return self._data[:size] + + self._writer.send((yield)) + + def waituntil(self, stop, limit=None): + """waituntil() reads until `stop` bytes sequence.""" + assert isinstance(stop, bytes) and stop, \ + 'bytes is required: {!r}'.format(stop) + + stop_len = len(stop) + + while True: + if self._helper.exception: + raise self._helper.exception + + pos = self._data.find(stop) + if pos >= 0: + size = pos + stop_len + if limit is not None and size > limit: + raise errors.LineLimitExceededParserError( + 'Line is too long. %s' % bytes(self._data), limit) + + return self._data[:size] + else: + if limit is not None and len(self._data) > limit: + raise errors.LineLimitExceededParserError( + 'Line is too long. 
%s' % bytes(self._data), limit) + + self._writer.send((yield)) + + def skip(self, size): + """skip() skips specified amount of bytes.""" + + while len(self._data) < size: + if self._helper.exception: + raise self._helper.exception + + self._writer.send((yield)) + + del self._data[:size] + + def skipuntil(self, stop): + """skipuntil() reads until `stop` bytes sequence.""" + assert isinstance(stop, bytes) and stop, \ + 'bytes is required: {!r}'.format(stop) + + stop_len = len(stop) + + while True: + if self._helper.exception: + raise self._helper.exception + + stop_line = self._data.find(stop) + if stop_line >= 0: + size = stop_line + stop_len + del self._data[:size] + return + + self._writer.send((yield)) + + def extend(self, data): + self._data.extend(data) + + def __len__(self): + return len(self._data) + + def __bytes__(self): + return bytes(self._data) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/protocol.py b/RBXLegacyDiscordBot/lib/aiohttp/protocol.py new file mode 100644 index 0000000..457f0cf --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/protocol.py @@ -0,0 +1,916 @@ +"""Http related parsers and protocol.""" + +import collections +import functools +import http.server +import re +import string +import sys +import zlib +from abc import ABC, abstractmethod +from wsgiref.handlers import format_date_time + +from multidict import CIMultiDict, istr + +import aiohttp + +from . import errors, hdrs +from .helpers import reify +from .log import internal_logger + +__all__ = ('HttpMessage', 'Request', 'Response', + 'HttpVersion', 'HttpVersion10', 'HttpVersion11', + 'RawRequestMessage', 'RawResponseMessage', + 'HttpPrefixParser', 'HttpRequestParser', 'HttpResponseParser', + 'HttpPayloadParser') + +ASCIISET = set(string.printable) +METHRE = re.compile('[A-Z0-9$-_.]+') +VERSRE = re.compile('HTTP/(\d+).(\d+)') +HDRRE = re.compile(b'[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]') +EOF_MARKER = object() +EOL_MARKER = object() +STATUS_LINE_READY = object() + +RESPONSES = http.server.BaseHTTPRequestHandler.responses + +HttpVersion = collections.namedtuple( + 'HttpVersion', ['major', 'minor']) +HttpVersion10 = HttpVersion(1, 0) +HttpVersion11 = HttpVersion(1, 1) + +RawStatusLineMessage = collections.namedtuple( + 'RawStatusLineMessage', ['method', 'path', 'version']) + +RawRequestMessage = collections.namedtuple( + 'RawRequestMessage', + ['method', 'path', 'version', 'headers', 'raw_headers', + 'should_close', 'compression']) + + +RawResponseMessage = collections.namedtuple( + 'RawResponseMessage', + ['version', 'code', 'reason', 'headers', 'raw_headers', + 'should_close', 'compression']) + + +class HttpParser: + + def __init__(self, max_line_size=8190, max_headers=32768, + max_field_size=8190): + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + + def parse_headers(self, lines): + """Parses RFC 5322 headers from a stream. + + Line continuations are supported. Returns list of header name + and value pairs. Header name is in upper case. + """ + close_conn = None + encoding = None + headers = CIMultiDict() + raw_headers = [] + + lines_idx = 1 + line = lines[1] + + while line: + header_length = len(line) + + # Parse initial header name : value pair. 
+ try: + bname, bvalue = line.split(b':', 1) + except ValueError: + raise errors.InvalidHeader(line) from None + + bname = bname.strip(b' \t').upper() + if HDRRE.search(bname): + raise errors.InvalidHeader(bname) + + # next line + lines_idx += 1 + line = lines[lines_idx] + + # consume continuation lines + continuation = line and line[0] in (32, 9) # (' ', '\t') + + if continuation: + bvalue = [bvalue] + while continuation: + header_length += len(line) + if header_length > self.max_field_size: + raise errors.LineTooLong( + 'limit request headers fields size') + bvalue.append(line) + + # next line + lines_idx += 1 + line = lines[lines_idx] + continuation = line[0] in (32, 9) # (' ', '\t') + bvalue = b'\r\n'.join(bvalue) + else: + if header_length > self.max_field_size: + raise errors.LineTooLong( + 'limit request headers fields size') + + bvalue = bvalue.strip() + + name = istr(bname.decode('utf-8', 'surrogateescape')) + value = bvalue.decode('utf-8', 'surrogateescape') + + # keep-alive and encoding + if name == hdrs.CONNECTION: + v = value.lower() + if v == 'close': + close_conn = True + elif v == 'keep-alive': + close_conn = False + elif name == hdrs.CONTENT_ENCODING: + enc = value.lower() + if enc in ('gzip', 'deflate'): + encoding = enc + + headers.add(name, value) + raw_headers.append((bname, bvalue)) + + return headers, raw_headers, close_conn, encoding + + +class HttpPrefixParser: + """Waits for 'HTTP' prefix (non destructive)""" + + def __init__(self, allowed_methods=()): + self.allowed_methods = [m.upper() for m in allowed_methods] + + def __call__(self, out, buf): + raw_data = yield from buf.waituntil(b' ', 12) + method = raw_data.decode('ascii', 'surrogateescape').strip() + + # method + method = method.upper() + if not METHRE.match(method): + raise errors.BadStatusLine(method) + + # allowed method + if self.allowed_methods and method not in self.allowed_methods: + raise errors.HttpMethodNotAllowed(message=method) + + out.feed_data(method, len(method)) + out.feed_eof() + + +class HttpRequestParser(HttpParser): + """Read request status line. Exception errors.BadStatusLine + could be raised in case of any errors in status line. + Returns RawRequestMessage. + """ + + def __call__(self, out, buf): + # read HTTP message (request line + headers) + try: + raw_data = yield from buf.readuntil( + b'\r\n\r\n', self.max_headers) + except errors.LineLimitExceededParserError as exc: + raise errors.LineTooLong(exc.limit) from None + + lines = raw_data.split(b'\r\n') + + # request line + line = lines[0].decode('utf-8', 'surrogateescape') + try: + method, path, version = line.split(None, 2) + except ValueError: + raise errors.BadStatusLine(line) from None + + # method + method = method.upper() + if not METHRE.match(method): + raise errors.BadStatusLine(method) + + # version + try: + if version.startswith('HTTP/'): + n1, n2 = version[5:].split('.', 1) + version = HttpVersion(int(n1), int(n2)) + else: + raise errors.BadStatusLine(version) + except: + raise errors.BadStatusLine(version) + + # read headers + headers, raw_headers, close, compression = self.parse_headers(lines) + if close is None: # then the headers weren't set in the request + if version <= HttpVersion10: # HTTP 1.0 must asks to not close + close = True + else: # HTTP 1.1 must ask to close. + close = False + + out.feed_data( + RawRequestMessage( + method, path, version, headers, raw_headers, + close, compression), + len(raw_data)) + out.feed_eof() + + +class HttpResponseParser(HttpParser): + """Read response status line and headers. 
+ + BadStatusLine could be raised in case of any errors in status line. + Returns RawResponseMessage""" + + def __call__(self, out, buf): + # read HTTP message (response line + headers) + try: + raw_data = yield from buf.readuntil( + b'\r\n\r\n', self.max_line_size + self.max_headers) + except errors.LineLimitExceededParserError as exc: + raise errors.LineTooLong(exc.limit) from None + + lines = raw_data.split(b'\r\n') + + line = lines[0].decode('utf-8', 'surrogateescape') + try: + version, status = line.split(None, 1) + except ValueError: + raise errors.BadStatusLine(line) from None + else: + try: + status, reason = status.split(None, 1) + except ValueError: + reason = '' + + # version + match = VERSRE.match(version) + if match is None: + raise errors.BadStatusLine(line) + version = HttpVersion(int(match.group(1)), int(match.group(2))) + + # The status code is a three-digit number + try: + status = int(status) + except ValueError: + raise errors.BadStatusLine(line) from None + + if status < 100 or status > 999: + raise errors.BadStatusLine(line) + + # read headers + headers, raw_headers, close, compression = self.parse_headers(lines) + + if close is None: + close = version <= HttpVersion10 + + out.feed_data( + RawResponseMessage( + version, status, reason.strip(), + headers, raw_headers, close, compression), + len(raw_data)) + out.feed_eof() + + +class HttpPayloadParser: + + def __init__(self, message, length=None, compression=True, + readall=False, response_with_body=True): + self.message = message + self.length = length + self.compression = compression + self.readall = readall + self.response_with_body = response_with_body + + def __call__(self, out, buf): + # payload params + length = self.message.headers.get(hdrs.CONTENT_LENGTH, self.length) + if hdrs.SEC_WEBSOCKET_KEY1 in self.message.headers: + length = 8 + + # payload decompression wrapper + if (self.response_with_body and + self.compression and self.message.compression): + out = DeflateBuffer(out, self.message.compression) + + # payload parser + if not self.response_with_body: + # don't parse payload if it's not expected to be received + pass + + elif 'chunked' in self.message.headers.get( + hdrs.TRANSFER_ENCODING, ''): + yield from self.parse_chunked_payload(out, buf) + + elif length is not None: + try: + length = int(length) + except ValueError: + raise errors.InvalidHeader(hdrs.CONTENT_LENGTH) from None + + if length < 0: + raise errors.InvalidHeader(hdrs.CONTENT_LENGTH) + elif length > 0: + yield from self.parse_length_payload(out, buf, length) + else: + if self.readall and getattr(self.message, 'code', 0) != 204: + yield from self.parse_eof_payload(out, buf) + elif getattr(self.message, 'method', None) in ('PUT', 'POST'): + internal_logger.warning( # pragma: no cover + 'Content-Length or Transfer-Encoding header is required') + + out.feed_eof() + + def parse_chunked_payload(self, out, buf): + """Chunked transfer encoding parser.""" + while True: + # read next chunk size + line = yield from buf.readuntil(b'\r\n', 8192) + + i = line.find(b';') + if i >= 0: + line = line[:i] # strip chunk-extensions + else: + line = line.strip() + try: + size = int(line, 16) + except ValueError: + raise errors.TransferEncodingError(line) from None + + if size == 0: # eof marker + break + + # read chunk and feed buffer + while size: + chunk = yield from buf.readsome(size) + out.feed_data(chunk, len(chunk)) + size = size - len(chunk) + + # toss the CRLF at the end of the chunk + yield from buf.skip(2) + + # read and discard trailer up to the CRLF 
terminator + yield from buf.skipuntil(b'\r\n') + + def parse_length_payload(self, out, buf, length=0): + """Read specified amount of bytes.""" + required = length + while required: + chunk = yield from buf.readsome(required) + out.feed_data(chunk, len(chunk)) + required -= len(chunk) + + def parse_eof_payload(self, out, buf): + """Read all bytes until eof.""" + try: + while True: + chunk = yield from buf.readsome() + out.feed_data(chunk, len(chunk)) + except aiohttp.EofStream: + pass + + +class DeflateBuffer: + """DeflateStream decompress stream and feed data into specified stream.""" + + def __init__(self, out, encoding): + self.out = out + zlib_mode = (16 + zlib.MAX_WBITS + if encoding == 'gzip' else -zlib.MAX_WBITS) + + self.zlib = zlib.decompressobj(wbits=zlib_mode) + + def feed_data(self, chunk, size): + try: + chunk = self.zlib.decompress(chunk) + except Exception: + raise errors.ContentEncodingError('deflate') + + if chunk: + self.out.feed_data(chunk, len(chunk)) + + def feed_eof(self): + chunk = self.zlib.flush() + self.out.feed_data(chunk, len(chunk)) + if not self.zlib.eof: + raise errors.ContentEncodingError('deflate') + + self.out.feed_eof() + + +def wrap_payload_filter(func): + """Wraps payload filter and piped filters. + + Filter is a generator that accepts arbitrary chunks of data, + modify data and emit new stream of data. + + For example we have stream of chunks: ['1', '2', '3', '4', '5'], + we can apply chunking filter to this stream: + + ['1', '2', '3', '4', '5'] + | + response.add_chunking_filter(2) + | + ['12', '34', '5'] + + It is possible to use different filters at the same time. + + For a example to compress incoming stream with 'deflate' encoding + and then split data and emit chunks of 8192 bytes size chunks: + + >>> response.add_compression_filter('deflate') + >>> response.add_chunking_filter(8192) + + Filters do not alter transfer encoding. + + Filter can receive types types of data, bytes object or EOF_MARKER. + + 1. If filter receives bytes object, it should process data + and yield processed data then yield EOL_MARKER object. + 2. If Filter received EOF_MARKER, it should yield remaining + data (buffered) and then yield EOF_MARKER. + """ + @functools.wraps(func) + def wrapper(self, *args, **kw): + new_filter = func(self, *args, **kw) + + filter = self.filter + if filter is not None: + next(new_filter) + self.filter = filter_pipe(filter, new_filter) + else: + self.filter = new_filter + + next(self.filter) + + return wrapper + + +def filter_pipe(filter, filter2, *, + EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER): + """Creates pipe between two filters. + + filter_pipe() feeds first filter with incoming data and then + send yielded from first filter data into filter2, results of + filter2 are being emitted. + + 1. If filter_pipe receives bytes object, it sends it to the first filter. + 2. Reads yielded values from the first filter until it receives + EOF_MARKER or EOL_MARKER. + 3. Each of this values is being send to second filter. + 4. Reads yielded values from second filter until it receives EOF_MARKER + or EOL_MARKER. Each of this values yields to writer. 
+ """ + chunk = yield + + while True: + eof = chunk is EOF_MARKER + chunk = filter.send(chunk) + + while chunk is not EOL_MARKER: + chunk = filter2.send(chunk) + + while chunk not in (EOF_MARKER, EOL_MARKER): + yield chunk + chunk = next(filter2) + + if chunk is not EOF_MARKER: + if eof: + chunk = EOF_MARKER + else: + chunk = next(filter) + else: + break + + chunk = yield EOL_MARKER + + +class HttpMessage(ABC): + """HttpMessage allows to write headers and payload to a stream. + + For example, lets say we want to read file then compress it with deflate + compression and then send it with chunked transfer encoding, code may look + like this: + + >>> response = aiohttp.Response(transport, 200) + + We have to use deflate compression first: + + >>> response.add_compression_filter('deflate') + + Then we want to split output stream into chunks of 1024 bytes size: + + >>> response.add_chunking_filter(1024) + + We can add headers to response with add_headers() method. add_headers() + does not send data to transport, send_headers() sends request/response + line and then sends headers: + + >>> response.add_headers( + ... ('Content-Disposition', 'attachment; filename="..."')) + >>> response.send_headers() + + Now we can use chunked writer to write stream to a network stream. + First call to write() method sends response status line and headers, + add_header() and add_headers() method unavailable at this stage: + + >>> with open('...', 'rb') as f: + ... chunk = fp.read(8192) + ... while chunk: + ... response.write(chunk) + ... chunk = fp.read(8192) + + >>> response.write_eof() + + """ + + writer = None + + # 'filter' is being used for altering write() behaviour, + # add_chunking_filter adds deflate/gzip compression and + # add_compression_filter splits incoming data into a chunks. + filter = None + + HOP_HEADERS = None # Must be set by subclass. + + SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format( + sys.version_info, aiohttp.__version__) + + upgrade = False # Connection: UPGRADE + websocket = False # Upgrade: WEBSOCKET + has_chunked_hdr = False # Transfer-encoding: chunked + + # subclass can enable auto sending headers with write() call, + # this is useful for wsgi's start_response implementation. + _send_headers = False + + def __init__(self, transport, version, close): + self.transport = transport + self._version = version + self.closing = close + self.keepalive = None + self.chunked = False + self.length = None + self.headers = CIMultiDict() + self.headers_sent = False + self.output_length = 0 + self.headers_length = 0 + self._output_size = 0 + + @property + @abstractmethod + def status_line(self): + return b'' + + @abstractmethod + def autochunked(self): + return False + + @property + def version(self): + return self._version + + @property + def body_length(self): + return self.output_length - self.headers_length + + def force_close(self): + self.closing = True + self.keepalive = False + + def enable_chunked_encoding(self): + self.chunked = True + + def keep_alive(self): + if self.keepalive is None: + if self.version < HttpVersion10: + # keep alive not supported at all + return False + if self.version == HttpVersion10: + if self.headers.get(hdrs.CONNECTION) == 'keep-alive': + return True + else: # no headers means we close for Http 1.0 + return False + else: + return not self.closing + else: + return self.keepalive + + def is_headers_sent(self): + return self.headers_sent + + def add_header(self, name, value): + """Analyze headers. 
Calculate content length, + removes hop headers, etc.""" + assert not self.headers_sent, 'headers have been sent already' + assert isinstance(name, str), \ + 'Header name should be a string, got {!r}'.format(name) + assert set(name).issubset(ASCIISET), \ + 'Header name should contain ASCII chars, got {!r}'.format(name) + assert isinstance(value, str), \ + 'Header {!r} should have string value, got {!r}'.format( + name, value) + + name = istr(name) + value = value.strip() + + if name == hdrs.CONTENT_LENGTH: + self.length = int(value) + + if name == hdrs.TRANSFER_ENCODING: + self.has_chunked_hdr = value.lower().strip() == 'chunked' + + if name == hdrs.CONNECTION: + val = value.lower() + # handle websocket + if 'upgrade' in val: + self.upgrade = True + # connection keep-alive + elif 'close' in val: + self.keepalive = False + elif 'keep-alive' in val: + self.keepalive = True + + elif name == hdrs.UPGRADE: + if 'websocket' in value.lower(): + self.websocket = True + self.headers[name] = value + + elif name not in self.HOP_HEADERS: + # ignore hop-by-hop headers + self.headers.add(name, value) + + def add_headers(self, *headers): + """Adds headers to a HTTP message.""" + for name, value in headers: + self.add_header(name, value) + + def send_headers(self, _sep=': ', _end='\r\n'): + """Writes headers to a stream. Constructs payload writer.""" + # Chunked response is only for HTTP/1.1 clients or newer + # and there is no Content-Length header is set. + # Do not use chunked responses when the response is guaranteed to + # not have a response body (304, 204). + assert not self.headers_sent, 'headers have been sent already' + self.headers_sent = True + + if self.chunked or self.autochunked(): + self.writer = self._write_chunked_payload() + self.headers[hdrs.TRANSFER_ENCODING] = 'chunked' + + elif self.length is not None: + self.writer = self._write_length_payload(self.length) + + else: + self.writer = self._write_eof_payload() + + next(self.writer) + + self._add_default_headers() + + # status + headers + headers = self.status_line + ''.join( + [k + _sep + v + _end for k, v in self.headers.items()]) + headers = headers.encode('utf-8') + b'\r\n' + + self.output_length += len(headers) + self.headers_length = len(headers) + self.transport.write(headers) + + def _add_default_headers(self): + # set the connection header + connection = None + if self.upgrade: + connection = 'upgrade' + elif not self.closing if self.keepalive is None else self.keepalive: + if self.version == HttpVersion10: + connection = 'keep-alive' + else: + if self.version == HttpVersion11: + connection = 'close' + + if connection is not None: + self.headers[hdrs.CONNECTION] = connection + + def write(self, chunk, *, + drain=False, EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER): + """Writes chunk of data to a stream by using different writers. + + writer uses filter to modify chunk of data. + write_eof() indicates end of stream. + writer can't be used after write_eof() method being called. + write() return drain future. + """ + assert (isinstance(chunk, (bytes, bytearray)) or + chunk is EOF_MARKER), chunk + + size = self.output_length + + if self._send_headers and not self.headers_sent: + self.send_headers() + + assert self.writer is not None, 'send_headers() is not called.' 
+ + if self.filter: + chunk = self.filter.send(chunk) + while chunk not in (EOF_MARKER, EOL_MARKER): + if chunk: + self.writer.send(chunk) + chunk = next(self.filter) + else: + if chunk is not EOF_MARKER: + self.writer.send(chunk) + + self._output_size += self.output_length - size + + if self._output_size > 64 * 1024: + if drain: + self._output_size = 0 + return self.transport.drain() + + return () + + def write_eof(self): + self.write(EOF_MARKER) + try: + self.writer.throw(aiohttp.EofStream()) + except StopIteration: + pass + + return self.transport.drain() + + def _write_chunked_payload(self): + """Write data in chunked transfer encoding.""" + while True: + try: + chunk = yield + except aiohttp.EofStream: + self.transport.write(b'0\r\n\r\n') + self.output_length += 5 + break + + chunk = bytes(chunk) + chunk_len = '{:x}\r\n'.format(len(chunk)).encode('ascii') + self.transport.write(chunk_len + chunk + b'\r\n') + self.output_length += len(chunk_len) + len(chunk) + 2 + + def _write_length_payload(self, length): + """Write specified number of bytes to a stream.""" + while True: + try: + chunk = yield + except aiohttp.EofStream: + break + + if length: + l = len(chunk) + if length >= l: + self.transport.write(chunk) + self.output_length += l + length = length-l + else: + self.transport.write(chunk[:length]) + self.output_length += length + length = 0 + + def _write_eof_payload(self): + while True: + try: + chunk = yield + except aiohttp.EofStream: + break + + self.transport.write(chunk) + self.output_length += len(chunk) + + @wrap_payload_filter + def add_chunking_filter(self, chunk_size=16*1024, *, + EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER): + """Split incoming stream into chunks.""" + buf = bytearray() + chunk = yield + + while True: + if chunk is EOF_MARKER: + if buf: + yield buf + + yield EOF_MARKER + + else: + buf.extend(chunk) + + while len(buf) >= chunk_size: + chunk = bytes(buf[:chunk_size]) + del buf[:chunk_size] + yield chunk + + chunk = yield EOL_MARKER + + @wrap_payload_filter + def add_compression_filter(self, encoding='deflate', *, + EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER): + """Compress incoming stream with deflate or gzip encoding.""" + zlib_mode = (16 + zlib.MAX_WBITS + if encoding == 'gzip' else -zlib.MAX_WBITS) + zcomp = zlib.compressobj(wbits=zlib_mode) + + chunk = yield + while True: + if chunk is EOF_MARKER: + yield zcomp.flush() + chunk = yield EOF_MARKER + + else: + yield zcomp.compress(chunk) + chunk = yield EOL_MARKER + + +class Response(HttpMessage): + """Create HTTP response message. + + Transport is a socket stream transport. status is a response status code, + status has to be integer value. 
http_version is a tuple that represents + HTTP version, (1, 0) stands for HTTP/1.0 and (1, 1) is for HTTP/1.1 + """ + + HOP_HEADERS = () + + @staticmethod + def calc_reason(status, *, _RESPONSES=RESPONSES): + record = _RESPONSES.get(status) + if record is not None: + reason = record[0] + else: + reason = str(status) + return reason + + def __init__(self, transport, status, + http_version=HttpVersion11, close=False, reason=None): + super().__init__(transport, http_version, close) + + self._status = status + if reason is None: + reason = self.calc_reason(status) + + self._reason = reason + + @property + def status(self): + return self._status + + @property + def reason(self): + return self._reason + + @reify + def status_line(self): + version = self.version + return 'HTTP/{}.{} {} {}\r\n'.format( + version[0], version[1], self.status, self.reason) + + def autochunked(self): + return (self.length is None and + self.version >= HttpVersion11) + + def _add_default_headers(self): + super()._add_default_headers() + + if hdrs.DATE not in self.headers: + # format_date_time(None) is quite expensive + self.headers.setdefault(hdrs.DATE, format_date_time(None)) + self.headers.setdefault(hdrs.SERVER, self.SERVER_SOFTWARE) + + +class Request(HttpMessage): + + HOP_HEADERS = () + + def __init__(self, transport, method, path, + http_version=HttpVersion11, close=False): + # set the default for HTTP 0.9 to be different + # will only be overwritten with keep-alive header + if http_version < HttpVersion10: + close = True + + super().__init__(transport, http_version, close) + + self._method = method + self._path = path + + @property + def method(self): + return self._method + + @property + def path(self): + return self._path + + @reify + def status_line(self): + return '{0} {1} HTTP/{2[0]}.{2[1]}\r\n'.format( + self.method, self.path, self.version) + + def autochunked(self): + return (self.length is None and + self.version >= HttpVersion11 and + self.status not in (304, 204)) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/pytest_plugin.py b/RBXLegacyDiscordBot/lib/aiohttp/pytest_plugin.py new file mode 100644 index 0000000..f22b819 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/pytest_plugin.py @@ -0,0 +1,113 @@ +import asyncio +import contextlib + +import pytest + +from aiohttp.web import Application + +from .test_utils import unused_port as _unused_port +from .test_utils import (TestClient, TestServer, loop_context, setup_test_loop, + teardown_test_loop) + + +@contextlib.contextmanager +def _passthrough_loop_context(loop): + if loop: + # loop already exists, pass it straight through + yield loop + else: + # this shadows loop_context's standard behavior + loop = setup_test_loop() + yield loop + teardown_test_loop(loop) + + +def pytest_pycollect_makeitem(collector, name, obj): + """ + Fix pytest collecting for coroutines. + """ + if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): + return list(collector._genfunctions(name, obj)) + + +def pytest_pyfunc_call(pyfuncitem): + """ + Run coroutines in an event loop instead of a normal function call. 
+ """ + if asyncio.iscoroutinefunction(pyfuncitem.function): + existing_loop = pyfuncitem.funcargs.get('loop', None) + with _passthrough_loop_context(existing_loop) as _loop: + testargs = {arg: pyfuncitem.funcargs[arg] + for arg in pyfuncitem._fixtureinfo.argnames} + + task = _loop.create_task(pyfuncitem.obj(**testargs)) + _loop.run_until_complete(task) + + return True + + +@pytest.yield_fixture +def loop(): + with loop_context() as _loop: + yield _loop + + +@pytest.fixture +def unused_port(): + return _unused_port + + +@pytest.yield_fixture +def test_server(loop): + servers = [] + + @asyncio.coroutine + def go(app, **kwargs): + assert app.loop is loop, \ + "Application is attached to other event loop" + + server = TestServer(app) + yield from server.start_server(**kwargs) + servers.append(server) + return server + + yield go + + @asyncio.coroutine + def finalize(): + while servers: + yield from servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.yield_fixture +def test_client(loop): + clients = [] + + @asyncio.coroutine + def go(__param, *args, **kwargs): + if isinstance(__param, Application): + assert not args, "args should be empty" + assert not kwargs, "kwargs should be empty" + assert __param.loop is loop, \ + "Application is attached to other event loop" + elif isinstance(__param, TestServer): + assert __param.app.loop is loop, \ + "TestServer is attached to other event loop" + else: + __param = __param(loop, *args, **kwargs) + + client = TestClient(__param) + yield from client.start_server() + clients.append(client) + return client + + yield go + + @asyncio.coroutine + def finalize(): + while clients: + yield from clients.pop().close() + + loop.run_until_complete(finalize()) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/resolver.py b/RBXLegacyDiscordBot/lib/aiohttp/resolver.py new file mode 100644 index 0000000..e66e412 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/resolver.py @@ -0,0 +1,100 @@ +import asyncio +import socket + +from .abc import AbstractResolver + +__all__ = ('ThreadedResolver', 'AsyncResolver', 'DefaultResolver') + +try: + import aiodns + aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') +except ImportError: # pragma: no cover + aiodns = None + aiodns_default = False + + +class ThreadedResolver(AbstractResolver): + """Use Executor for synchronous getaddrinfo() calls, which defaults to + concurrent.futures.ThreadPoolExecutor. 
+ """ + + def __init__(self, loop=None): + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + + @asyncio.coroutine + def resolve(self, host, port=0, family=socket.AF_INET): + infos = yield from self._loop.getaddrinfo( + host, port, type=socket.SOCK_STREAM, family=family) + + hosts = [] + for family, _, proto, _, address in infos: + hosts.append( + {'hostname': host, + 'host': address[0], 'port': address[1], + 'family': family, 'proto': proto, + 'flags': socket.AI_NUMERICHOST}) + + return hosts + + @asyncio.coroutine + def close(self): + pass + + +class AsyncResolver(AbstractResolver): + """Use the `aiodns` package to make asynchronous DNS lookups""" + + def __init__(self, loop=None, *args, **kwargs): + if loop is None: + loop = asyncio.get_event_loop() + + if aiodns is None: + raise RuntimeError("Resolver requires aiodns library") + + self._loop = loop + self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) + + if not hasattr(self._resolver, 'gethostbyname'): + # aiodns 1.1 is not available, fallback to DNSResolver.query + self.resolve = self.resolve_with_query + + @asyncio.coroutine + def resolve(self, host, port=0, family=socket.AF_INET): + hosts = [] + resp = yield from self._resolver.gethostbyname(host, family) + + for address in resp.addresses: + hosts.append( + {'hostname': host, + 'host': address, 'port': port, + 'family': family, 'proto': 0, + 'flags': socket.AI_NUMERICHOST}) + return hosts + + @asyncio.coroutine + def resolve_with_query(self, host, port=0, family=socket.AF_INET): + if family == socket.AF_INET6: + qtype = 'AAAA' + else: + qtype = 'A' + + hosts = [] + resp = yield from self._resolver.query(host, qtype) + + for rr in resp: + hosts.append( + {'hostname': host, + 'host': rr.host, 'port': port, + 'family': family, 'proto': 0, + 'flags': socket.AI_NUMERICHOST}) + + return hosts + + @asyncio.coroutine + def close(self): + return self._resolver.cancel() + + +DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/RBXLegacyDiscordBot/lib/aiohttp/server.py b/RBXLegacyDiscordBot/lib/aiohttp/server.py new file mode 100644 index 0000000..9223a1f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/server.py @@ -0,0 +1,376 @@ +"""simple HTTP server.""" + +import asyncio +import http.server +import socket +import traceback +import warnings +from contextlib import suppress +from html import escape as html_escape + +import aiohttp +from aiohttp import errors, hdrs, helpers, streams +from aiohttp.helpers import Timeout, _get_kwarg, ensure_future +from aiohttp.log import access_logger, server_logger + +__all__ = ('ServerHttpProtocol',) + + +RESPONSES = http.server.BaseHTTPRequestHandler.responses +DEFAULT_ERROR_MESSAGE = """ + + + {status} {reason} + + +

<h1>{status} {reason}</h1>
+ {message} + +""" + + +if hasattr(socket, 'SO_KEEPALIVE'): + def tcp_keepalive(server, transport): + sock = transport.get_extra_info('socket') + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) +else: + def tcp_keepalive(server, transport): # pragma: no cover + pass + +EMPTY_PAYLOAD = streams.EmptyStreamReader() + + +class ServerHttpProtocol(aiohttp.StreamProtocol): + """Simple HTTP protocol implementation. + + ServerHttpProtocol handles incoming HTTP request. It reads request line, + request headers and request payload and calls handle_request() method. + By default it always returns with 404 response. + + ServerHttpProtocol handles errors in incoming request, like bad + status line, bad headers or incomplete payload. If any error occurs, + connection gets closed. + + :param keepalive_timeout: number of seconds before closing + keep-alive connection + :type keepalive_timeout: int or None + + :param bool tcp_keepalive: TCP keep-alive is on, default is on + + :param int slow_request_timeout: slow request timeout + + :param bool debug: enable debug mode + + :param logger: custom logger object + :type logger: aiohttp.log.server_logger + + :param access_log: custom logging object + :type access_log: aiohttp.log.server_logger + + :param str access_log_format: access log format string + + :param loop: Optional event loop + + :param int max_line_size: Optional maximum header line size + + :param int max_field_size: Optional maximum header field size + + :param int max_headers: Optional maximum header size + + """ + _request_count = 0 + _request_handler = None + _reading_request = False + _keepalive = False # keep transport open + + def __init__(self, *, loop=None, + keepalive_timeout=75, # NGINX default value is 75 secs + tcp_keepalive=True, + slow_request_timeout=0, + logger=server_logger, + access_log=access_logger, + access_log_format=helpers.AccessLogger.LOG_FORMAT, + debug=False, + max_line_size=8190, + max_headers=32768, + max_field_size=8190, + **kwargs): + + # process deprecated params + logger = _get_kwarg(kwargs, 'log', 'logger', logger) + + tcp_keepalive = _get_kwarg(kwargs, 'keep_alive_on', + 'tcp_keepalive', tcp_keepalive) + + keepalive_timeout = _get_kwarg(kwargs, 'keep_alive', + 'keepalive_timeout', keepalive_timeout) + + slow_request_timeout = _get_kwarg(kwargs, 'timeout', + 'slow_request_timeout', + slow_request_timeout) + + super().__init__( + loop=loop, + disconnect_error=errors.ClientDisconnectedError, **kwargs) + + self._tcp_keepalive = tcp_keepalive + self._keepalive_timeout = keepalive_timeout + self._slow_request_timeout = slow_request_timeout + self._loop = loop if loop is not None else asyncio.get_event_loop() + + self._request_prefix = aiohttp.HttpPrefixParser() + self._request_parser = aiohttp.HttpRequestParser( + max_line_size=max_line_size, + max_field_size=max_field_size, + max_headers=max_headers) + + self.logger = logger + self.debug = debug + self.access_log = access_log + if access_log: + self.access_logger = helpers.AccessLogger(access_log, + access_log_format) + else: + self.access_logger = None + self._closing = False + + @property + def keep_alive_timeout(self): + warnings.warn("Use keepalive_timeout property instead", + DeprecationWarning, + stacklevel=2) + return self._keepalive_timeout + + @property + def keepalive_timeout(self): + return self._keepalive_timeout + + @asyncio.coroutine + def shutdown(self, timeout=15.0): + """Worker process is about to exit, we need cleanup everything and + stop accepting requests. 
It is especially important for keep-alive + connections.""" + if self._request_handler is None: + return + self._closing = True + + if timeout: + canceller = self._loop.call_later(timeout, + self._request_handler.cancel) + with suppress(asyncio.CancelledError): + yield from self._request_handler + canceller.cancel() + else: + self._request_handler.cancel() + + def connection_made(self, transport): + super().connection_made(transport) + + self._request_handler = ensure_future(self.start(), loop=self._loop) + + if self._tcp_keepalive: + tcp_keepalive(self, transport) + + def connection_lost(self, exc): + super().connection_lost(exc) + + self._closing = True + if self._request_handler is not None: + self._request_handler.cancel() + + def data_received(self, data): + super().data_received(data) + + # reading request + if not self._reading_request: + self._reading_request = True + + def keep_alive(self, val): + """Set keep-alive connection mode. + + :param bool val: new state. + """ + self._keepalive = val + + def log_access(self, message, environ, response, time): + if self.access_logger: + self.access_logger.log(message, environ, response, + self.transport, time) + + def log_debug(self, *args, **kw): + if self.debug: + self.logger.debug(*args, **kw) + + def log_exception(self, *args, **kw): + self.logger.exception(*args, **kw) + + @asyncio.coroutine + def start(self): + """Start processing of incoming requests. + + It reads request line, request headers and request payload, then + calls handle_request() method. Subclass has to override + handle_request(). start() handles various exceptions in request + or response handling. Connection is being closed always unless + keep_alive(True) specified. + """ + reader = self.reader + + try: + while not self._closing: + message = None + self._keepalive = False + self._request_count += 1 + self._reading_request = False + + payload = None + with Timeout(max(self._slow_request_timeout, + self._keepalive_timeout), + loop=self._loop): + # read HTTP request method + prefix = reader.set_parser(self._request_prefix) + yield from prefix.read() + + # start reading request + self._reading_request = True + + # start slow request timer + # read request headers + httpstream = reader.set_parser(self._request_parser) + message = yield from httpstream.read() + + # request may not have payload + try: + content_length = int( + message.headers.get(hdrs.CONTENT_LENGTH, 0)) + except ValueError: + raise errors.InvalidHeader(hdrs.CONTENT_LENGTH) from None + + if (content_length > 0 or + message.method == 'CONNECT' or + hdrs.SEC_WEBSOCKET_KEY1 in message.headers or + 'chunked' in message.headers.get( + hdrs.TRANSFER_ENCODING, '')): + payload = streams.FlowControlStreamReader( + reader, loop=self._loop) + reader.set_parser( + aiohttp.HttpPayloadParser(message), payload) + else: + payload = EMPTY_PAYLOAD + + yield from self.handle_request(message, payload) + + if payload and not payload.is_eof(): + self.log_debug('Uncompleted request.') + self._closing = True + else: + reader.unset_parser() + if not self._keepalive or not self._keepalive_timeout: + self._closing = True + + except asyncio.CancelledError: + self.log_debug( + 'Request handler cancelled.') + return + except asyncio.TimeoutError: + self.log_debug( + 'Request handler timed out.') + return + except errors.ClientDisconnectedError: + self.log_debug( + 'Ignored premature client disconnection #1.') + return + except errors.HttpProcessingError as exc: + yield from self.handle_error(exc.code, message, + None, exc, 
exc.headers, + exc.message) + except Exception as exc: + yield from self.handle_error(500, message, None, exc) + finally: + self._request_handler = None + if self.transport is None: + self.log_debug( + 'Ignored premature client disconnection #2.') + else: + self.transport.close() + + def handle_error(self, status=500, message=None, + payload=None, exc=None, headers=None, reason=None): + """Handle errors. + + Returns HTTP response with specific status code. Logs additional + information. It always closes current connection.""" + now = self._loop.time() + try: + if self.transport is None: + # client has been disconnected during writing. + return () + + if status == 500: + self.log_exception("Error handling request") + + try: + if reason is None or reason == '': + reason, msg = RESPONSES[status] + else: + msg = reason + except KeyError: + status = 500 + reason, msg = '???', '' + + if self.debug and exc is not None: + try: + tb = traceback.format_exc() + tb = html_escape(tb) + msg += '
<br><h2>Traceback:</h2>\n<pre>{}</pre>
'.format(tb) + except: + pass + + html = DEFAULT_ERROR_MESSAGE.format( + status=status, reason=reason, message=msg).encode('utf-8') + + response = aiohttp.Response(self.writer, status, close=True) + response.add_header(hdrs.CONTENT_TYPE, 'text/html; charset=utf-8') + response.add_header(hdrs.CONTENT_LENGTH, str(len(html))) + if headers is not None: + for name, value in headers: + response.add_header(name, value) + response.send_headers() + + response.write(html) + # disable CORK, enable NODELAY if needed + self.writer.set_tcp_nodelay(True) + drain = response.write_eof() + + self.log_access(message, None, response, self._loop.time() - now) + return drain + finally: + self.keep_alive(False) + + def handle_request(self, message, payload): + """Handle a single HTTP request. + + Subclass should override this method. By default it always + returns 404 response. + + :param message: Request headers + :type message: aiohttp.protocol.HttpRequestParser + :param payload: Request payload + :type payload: aiohttp.streams.FlowControlStreamReader + """ + now = self._loop.time() + response = aiohttp.Response( + self.writer, 404, http_version=message.version, close=True) + + body = b'Page Not Found!' + + response.add_header(hdrs.CONTENT_TYPE, 'text/plain') + response.add_header(hdrs.CONTENT_LENGTH, str(len(body))) + response.send_headers() + response.write(body) + drain = response.write_eof() + + self.keep_alive(False) + self.log_access(message, None, response, self._loop.time() - now) + + return drain diff --git a/RBXLegacyDiscordBot/lib/aiohttp/signals.py b/RBXLegacyDiscordBot/lib/aiohttp/signals.py new file mode 100644 index 0000000..5093bb8 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/signals.py @@ -0,0 +1,71 @@ +import asyncio +from itertools import count + + +class BaseSignal(list): + + @asyncio.coroutine + def _send(self, *args, **kwargs): + for receiver in self: + res = receiver(*args, **kwargs) + if asyncio.iscoroutine(res) or isinstance(res, asyncio.Future): + yield from res + + def copy(self): + raise NotImplementedError("copy() is forbidden") + + def sort(self): + raise NotImplementedError("sort() is forbidden") + + +class Signal(BaseSignal): + """Coroutine-based signal implementation. + + To connect a callback to a signal, use any list method. + + Signals are fired using the :meth:`send` coroutine, which takes named + arguments. + """ + + def __init__(self, app): + super().__init__() + self._app = app + klass = self.__class__ + self._name = klass.__module__ + ':' + klass.__qualname__ + self._pre = app.on_pre_signal + self._post = app.on_post_signal + + @asyncio.coroutine + def send(self, *args, **kwargs): + """ + Sends data to all registered receivers. 
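+
+        A minimal illustrative sketch (the ``on_event`` receiver below is
+        hypothetical, not part of this module)::
+
+            signal = Signal(app)
+            signal.append(on_event)    # any list method registers a receiver
+            yield from signal.send('data', key='value')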
+ """ + ordinal = None + debug = self._app._debug + if debug: + ordinal = self._pre.ordinal() + yield from self._pre.send(ordinal, self._name, *args, **kwargs) + yield from self._send(*args, **kwargs) + if debug: + yield from self._post.send(ordinal, self._name, *args, **kwargs) + + +class DebugSignal(BaseSignal): + + @asyncio.coroutine + def send(self, ordinal, name, *args, **kwargs): + yield from self._send(ordinal, name, *args, **kwargs) + + +class PreSignal(DebugSignal): + + def __init__(self): + super().__init__() + self._counter = count(1) + + def ordinal(self): + return next(self._counter) + + +class PostSignal(DebugSignal): + pass diff --git a/RBXLegacyDiscordBot/lib/aiohttp/streams.py b/RBXLegacyDiscordBot/lib/aiohttp/streams.py new file mode 100644 index 0000000..fbe8d67 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/streams.py @@ -0,0 +1,672 @@ +import asyncio +import collections +import functools +import sys +import traceback + +from . import helpers +from .log import internal_logger + +__all__ = ( + 'EofStream', 'StreamReader', 'DataQueue', 'ChunksQueue', + 'FlowControlStreamReader', + 'FlowControlDataQueue', 'FlowControlChunksQueue') + +PY_35 = sys.version_info >= (3, 5) +PY_352 = sys.version_info >= (3, 5, 2) + +EOF_MARKER = b'' +DEFAULT_LIMIT = 2 ** 16 + + +class EofStream(Exception): + """eof stream indication.""" + + +if PY_35: + class AsyncStreamIterator: + + def __init__(self, read_func): + self.read_func = read_func + + def __aiter__(self): + return self + + if not PY_352: # pragma: no cover + __aiter__ = asyncio.coroutine(__aiter__) + + @asyncio.coroutine + def __anext__(self): + try: + rv = yield from self.read_func() + except EofStream: + raise StopAsyncIteration # NOQA + if rv == EOF_MARKER: + raise StopAsyncIteration # NOQA + return rv + + +class AsyncStreamReaderMixin: + + if PY_35: + def __aiter__(self): + return AsyncStreamIterator(self.readline) + + if not PY_352: # pragma: no cover + __aiter__ = asyncio.coroutine(__aiter__) + + def iter_chunked(self, n): + """Returns an asynchronous iterator that yields chunks of size n. + + Python-3.5 available for Python 3.5+ only + """ + return AsyncStreamIterator(lambda: self.read(n)) + + def iter_any(self): + """Returns an asynchronous iterator that yields slices of data + as they come. + + Python-3.5 available for Python 3.5+ only + """ + return AsyncStreamIterator(self.readany) + + +class StreamReader(AsyncStreamReaderMixin): + """An enhancement of asyncio.StreamReader. + + Supports asynchronous iteration by line, chunk or as available:: + + async for line in reader: + ... + async for chunk in reader.iter_chunked(1024): + ... + async for slice in reader.iter_any(): + ... 
+ + """ + + total_bytes = 0 + + def __init__(self, limit=DEFAULT_LIMIT, timeout=None, loop=None): + self._limit = limit + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._buffer = collections.deque() + self._buffer_size = 0 + self._buffer_offset = 0 + self._eof = False + self._waiter = None + self._canceller = None + self._eof_waiter = None + self._exception = None + self._timeout = timeout + + def __repr__(self): + info = ['StreamReader'] + if self._buffer_size: + info.append('%d bytes' % self._buffer_size) + if self._eof: + info.append('eof') + if self._limit != DEFAULT_LIMIT: + info.append('l=%d' % self._limit) + if self._waiter: + info.append('w=%r' % self._waiter) + if self._exception: + info.append('e=%r' % self._exception) + return '<%s>' % ' '.join(info) + + def exception(self): + return self._exception + + def set_exception(self, exc): + self._exception = exc + + waiter = self._waiter + if waiter is not None: + self._waiter = None + if not waiter.cancelled(): + waiter.set_exception(exc) + + canceller = self._canceller + if canceller is not None: + self._canceller = None + canceller.cancel() + + def feed_eof(self): + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + if not waiter.cancelled(): + waiter.set_result(True) + + canceller = self._canceller + if canceller is not None: + self._canceller = None + canceller.cancel() + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + if not waiter.cancelled(): + waiter.set_result(True) + + def is_eof(self): + """Return True if 'feed_eof' was called.""" + return self._eof + + def at_eof(self): + """Return True if the buffer is empty and 'feed_eof' was called.""" + return self._eof and not self._buffer + + @asyncio.coroutine + def wait_eof(self): + if self._eof: + return + + assert self._eof_waiter is None + self._eof_waiter = helpers.create_future(self._loop) + try: + yield from self._eof_waiter + finally: + self._eof_waiter = None + + def unread_data(self, data): + """ rollback reading some data from stream, inserting it to buffer head. + """ + if not data: + return + + if self._buffer_offset: + self._buffer[0] = self._buffer[0][self._buffer_offset:] + self._buffer_offset = 0 + self._buffer.appendleft(data) + self._buffer_size += len(data) + + def feed_data(self, data): + assert not self._eof, 'feed_data after feed_eof' + + if not data: + return + + self._buffer.append(data) + self._buffer_size += len(data) + self.total_bytes += len(data) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + if not waiter.cancelled(): + waiter.set_result(False) + + canceller = self._canceller + if canceller is not None: + self._canceller = None + canceller.cancel() + + @asyncio.coroutine + def _wait(self, func_name): + # StreamReader uses a future to link the protocol feed_data() method + # to a read coroutine. Running two read coroutines at the same time + # would have an unexpected behaviour. It would not possible to know + # which coroutine would get the next data. 
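+        # For example, awaiting reader.read() from two tasks at once would
+        # hit the RuntimeError below instead of racing for the same bytes.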
+ if self._waiter is not None: + raise RuntimeError('%s() called while another coroutine is ' + 'already waiting for incoming data' % func_name) + waiter = self._waiter = helpers.create_future(self._loop) + if self._timeout: + self._canceller = self._loop.call_later(self._timeout, + self.set_exception, + asyncio.TimeoutError()) + try: + yield from waiter + finally: + self._waiter = None + if self._canceller is not None: + self._canceller.cancel() + self._canceller = None + + @asyncio.coroutine + def readline(self): + if self._exception is not None: + raise self._exception + + line = [] + line_size = 0 + not_enough = True + + while not_enough: + while self._buffer and not_enough: + offset = self._buffer_offset + ichar = self._buffer[0].find(b'\n', offset) + 1 + # Read from current offset to found b'\n' or to the end. + data = self._read_nowait_chunk(ichar - offset if ichar else -1) + line.append(data) + line_size += len(data) + if ichar: + not_enough = False + + if line_size > self._limit: + raise ValueError('Line is too long') + + if self._eof: + break + + if not_enough: + yield from self._wait('readline') + + return b''.join(line) + + @asyncio.coroutine + def read(self, n=-1): + if self._exception is not None: + raise self._exception + + # migration problem; with DataQueue you have to catch + # EofStream exception, so common way is to run payload.read() inside + # infinite loop. what can cause real infinite loop with StreamReader + # lets keep this code one major release. + if __debug__: + if self._eof and not self._buffer: + self._eof_counter = getattr(self, '_eof_counter', 0) + 1 + if self._eof_counter > 5: + stack = traceback.format_stack() + internal_logger.warning( + 'Multiple access to StreamReader in eof state, ' + 'might be infinite loop: \n%s', stack) + + if not n: + return EOF_MARKER + + if n < 0: + # This used to just loop creating a new waiter hoping to + # collect everything in self._buffer, but that would + # deadlock if the subprocess sends more than self.limit + # bytes. So just call self.readany() until EOF. + blocks = [] + while True: + block = yield from self.readany() + if not block: + break + blocks.append(block) + return b''.join(blocks) + + if not self._buffer and not self._eof: + yield from self._wait('read') + + return self._read_nowait(n) + + @asyncio.coroutine + def readany(self): + if self._exception is not None: + raise self._exception + + if not self._buffer and not self._eof: + yield from self._wait('readany') + + return self._read_nowait(-1) + + @asyncio.coroutine + def readexactly(self, n): + if self._exception is not None: + raise self._exception + + blocks = [] + while n > 0: + block = yield from self.read(n) + if not block: + partial = b''.join(blocks) + raise asyncio.streams.IncompleteReadError( + partial, len(partial) + n) + blocks.append(block) + n -= len(block) + + return b''.join(blocks) + + def read_nowait(self, n=-1): + # default was changed to be consistent with .read(-1) + # + # I believe the most users don't know about the method and + # they are not affected. 
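+        # With the default n=-1 the call drains the whole internal buffer,
+        # mirroring read(-1); a positive n returns at most n buffered bytes.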
+ assert n is not None, "n should be -1" + if self._exception is not None: + raise self._exception + + if self._waiter and not self._waiter.done(): + raise RuntimeError( + 'Called while some coroutine is waiting for incoming data.') + + return self._read_nowait(n) + + def _read_nowait_chunk(self, n): + first_buffer = self._buffer[0] + offset = self._buffer_offset + if n != -1 and len(first_buffer) - offset > n: + data = first_buffer[offset:offset + n] + self._buffer_offset += n + + elif offset: + self._buffer.popleft() + data = first_buffer[offset:] + self._buffer_offset = 0 + + else: + data = self._buffer.popleft() + + self._buffer_size -= len(data) + return data + + def _read_nowait(self, n): + chunks = [] + + while self._buffer: + chunk = self._read_nowait_chunk(n) + chunks.append(chunk) + if n != -1: + n -= len(chunk) + if n == 0: + break + + return b''.join(chunks) if chunks else EOF_MARKER + + +class EmptyStreamReader(AsyncStreamReaderMixin): + + def exception(self): + return None + + def set_exception(self, exc): + pass + + def feed_eof(self): + pass + + def is_eof(self): + return True + + def at_eof(self): + return True + + @asyncio.coroutine + def wait_eof(self): + return + + def feed_data(self, data): + pass + + @asyncio.coroutine + def readline(self): + return EOF_MARKER + + @asyncio.coroutine + def read(self, n=-1): + return EOF_MARKER + + @asyncio.coroutine + def readany(self): + return EOF_MARKER + + @asyncio.coroutine + def readexactly(self, n): + raise asyncio.streams.IncompleteReadError(b'', n) + + def read_nowait(self): + return EOF_MARKER + + +class DataQueue: + """DataQueue is a general-purpose blocking queue with one reader.""" + + def __init__(self, *, loop=None): + self._loop = loop + self._eof = False + self._waiter = None + self._exception = None + self._size = 0 + self._buffer = collections.deque() + + def is_eof(self): + return self._eof + + def at_eof(self): + return self._eof and not self._buffer + + def exception(self): + return self._exception + + def set_exception(self, exc): + self._exception = exc + + waiter = self._waiter + if waiter is not None: + self._waiter = None + if not waiter.done(): + waiter.set_exception(exc) + + def feed_data(self, data, size=0): + self._size += size + self._buffer.append((data, size)) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + if not waiter.cancelled(): + waiter.set_result(True) + + def feed_eof(self): + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + if not waiter.cancelled(): + waiter.set_result(False) + + @asyncio.coroutine + def read(self): + if not self._buffer and not self._eof: + if self._exception is not None: + raise self._exception + + assert not self._waiter + self._waiter = helpers.create_future(self._loop) + try: + yield from self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + return data + else: + if self._exception is not None: + raise self._exception + else: + raise EofStream + + if PY_35: + def __aiter__(self): + return AsyncStreamIterator(self.read) + + if not PY_352: # pragma: no cover + __aiter__ = asyncio.coroutine(__aiter__) + + +class ChunksQueue(DataQueue): + """Like a :class:`DataQueue`, but for binary chunked data transfer.""" + + @asyncio.coroutine + def read(self): + try: + return (yield from super().read()) + except EofStream: + return EOF_MARKER + + readany = read + + +def maybe_resume(func): + 
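+    # Decorator for FlowControlStreamReader read methods: after the wrapped
+    # call finishes, _check_buffer_size() pauses or resumes the underlying
+    # transport; it handles both coroutine and plain functions.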
+ if asyncio.iscoroutinefunction(func): + @asyncio.coroutine + @functools.wraps(func) + def wrapper(self, *args, **kw): + result = yield from func(self, *args, **kw) + self._check_buffer_size() + return result + else: + @functools.wraps(func) + def wrapper(self, *args, **kw): + result = func(self, *args, **kw) + self._check_buffer_size() + return result + + return wrapper + + +class FlowControlStreamReader(StreamReader): + + def __init__(self, stream, limit=DEFAULT_LIMIT, *args, **kwargs): + super().__init__(*args, **kwargs) + + self._stream = stream + self._b_limit = limit * 2 + + # resume transport reading + if stream.paused: + try: + self._stream.transport.resume_reading() + except (AttributeError, NotImplementedError): + pass + else: + self._stream.paused = False + + def _check_buffer_size(self): + if self._stream.paused: + if self._buffer_size < self._b_limit: + try: + self._stream.transport.resume_reading() + except (AttributeError, NotImplementedError): + pass + else: + self._stream.paused = False + else: + if self._buffer_size > self._b_limit: + try: + self._stream.transport.pause_reading() + except (AttributeError, NotImplementedError): + pass + else: + self._stream.paused = True + + def feed_data(self, data, size=0): + has_waiter = self._waiter is not None and not self._waiter.cancelled() + + super().feed_data(data) + + if (not self._stream.paused and + not has_waiter and self._buffer_size > self._b_limit): + try: + self._stream.transport.pause_reading() + except (AttributeError, NotImplementedError): + pass + else: + self._stream.paused = True + + @maybe_resume + @asyncio.coroutine + def read(self, n=-1): + return (yield from super().read(n)) + + @maybe_resume + @asyncio.coroutine + def readline(self): + return (yield from super().readline()) + + @maybe_resume + @asyncio.coroutine + def readany(self): + return (yield from super().readany()) + + @maybe_resume + @asyncio.coroutine + def readexactly(self, n): + return (yield from super().readexactly(n)) + + @maybe_resume + def read_nowait(self, n=-1): + return super().read_nowait(n) + + +class FlowControlDataQueue(DataQueue): + """FlowControlDataQueue resumes and pauses an underlying stream. 
+ + It is a destination for parsed data.""" + + def __init__(self, stream, *, limit=DEFAULT_LIMIT, loop=None): + super().__init__(loop=loop) + + self._stream = stream + self._limit = limit * 2 + + # resume transport reading + if stream.paused: + try: + self._stream.transport.resume_reading() + except (AttributeError, NotImplementedError): + pass + else: + self._stream.paused = False + + def feed_data(self, data, size): + has_waiter = self._waiter is not None and not self._waiter.cancelled() + + super().feed_data(data, size) + + if (not self._stream.paused and + not has_waiter and self._size > self._limit): + try: + self._stream.transport.pause_reading() + except (AttributeError, NotImplementedError): + pass + else: + self._stream.paused = True + + @asyncio.coroutine + def read(self): + result = yield from super().read() + + if self._stream.paused: + if self._size < self._limit: + try: + self._stream.transport.resume_reading() + except (AttributeError, NotImplementedError): + pass + else: + self._stream.paused = False + else: + if self._size > self._limit: + try: + self._stream.transport.pause_reading() + except (AttributeError, NotImplementedError): + pass + else: + self._stream.paused = True + + return result + + +class FlowControlChunksQueue(FlowControlDataQueue): + + @asyncio.coroutine + def read(self): + try: + return (yield from super().read()) + except EofStream: + return EOF_MARKER + + readany = read diff --git a/RBXLegacyDiscordBot/lib/aiohttp/test_utils.py b/RBXLegacyDiscordBot/lib/aiohttp/test_utils.py new file mode 100644 index 0000000..8cffb3d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/test_utils.py @@ -0,0 +1,485 @@ +"""Utilities shared by tests.""" + +import asyncio +import contextlib +import functools +import gc +import socket +import sys +import unittest +from unittest import mock + +from multidict import CIMultiDict + +import aiohttp + +from . import ClientSession, hdrs +from .helpers import sentinel +from .protocol import HttpVersion, RawRequestMessage +from .signals import Signal +from .web import Application, Request + +PY_35 = sys.version_info >= (3, 5) + + +def run_briefly(loop): + @asyncio.coroutine + def once(): + pass + t = asyncio.Task(once(), loop=loop) + loop.run_until_complete(t) + + +def unused_port(): + """Return a port that is unused on the current host.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(('127.0.0.1', 0)) + return s.getsockname()[1] + + +class TestServer: + def __init__(self, app, *, scheme="http", host='127.0.0.1'): + self.app = app + self._loop = app.loop + self.port = None + self.server = None + self.handler = None + self._root = None + self.host = host + self.scheme = scheme + self._closed = False + + @asyncio.coroutine + def start_server(self, **kwargs): + if self.server: + return + self.port = unused_port() + self._root = '{}://{}:{}'.format(self.scheme, self.host, self.port) + self.handler = self.app.make_handler(**kwargs) + self.server = yield from self._loop.create_server(self.handler, + self.host, + self.port) + + def make_url(self, path): + return self._root + path + + @asyncio.coroutine + def close(self): + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run when the object is garbage collected, and on + exit when used as a context manager. 
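+
+        A minimal lifecycle sketch (``app`` is assumed to be an existing
+        aiohttp.web.Application)::
+
+            server = TestServer(app)
+            yield from server.start_server()
+            url = server.make_url('/')
+            yield from server.close()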
+ + """ + if self.server is not None and not self._closed: + self.server.close() + yield from self.server.wait_closed() + yield from self.app.shutdown() + yield from self.handler.finish_connections() + yield from self.app.cleanup() + self._root = None + self.port = None + self._closed = True + + def __enter__(self): + self._loop.run_until_complete(self.start_server()) + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._loop.run_until_complete(self.close()) + + if PY_35: + @asyncio.coroutine + def __aenter__(self): + yield from self.start_server() + return self + + @asyncio.coroutine + def __aexit__(self, exc_type, exc_value, traceback): + yield from self.close() + + +class TestClient: + """ + A test client implementation, for a aiohttp.web.Application. + + :param app: the aiohttp.web application passed to create_test_server + + :type app: aiohttp.web.Application + + :param protocol: http or https + + :type protocol: str + + TestClient can also be used as a contextmanager, returning + the instance of itself instantiated. + """ + + def __init__(self, app_or_server, *, scheme=sentinel, host=sentinel): + if isinstance(app_or_server, TestServer): + if scheme is not sentinel or host is not sentinel: + raise ValueError("scheme and host are mutable exclusive " + "with TestServer parameter") + self._server = app_or_server + elif isinstance(app_or_server, Application): + scheme = "http" if scheme is sentinel else scheme + host = '127.0.0.1' if host is sentinel else host + self._server = TestServer(app_or_server, + scheme=scheme, host=host) + else: + raise TypeError("app_or_server should be either web.Application " + "or TestServer instance") + self._loop = self._server.app.loop + self._session = ClientSession( + loop=self._loop, + cookie_jar=aiohttp.CookieJar(unsafe=True, + loop=self._loop)) + self._closed = False + self._responses = [] + + @asyncio.coroutine + def start_server(self): + yield from self._server.start_server() + + @property + def app(self): + return self._server.app + + @property + def host(self): + return self._server.host + + @property + def port(self): + return self._server.port + + @property + def handler(self): + return self._server.handler + + @property + def server(self): + return self._server.server + + @property + def session(self): + """A raw handler to the aiohttp.ClientSession. + + Unlike the methods on the TestClient, client session requests + do not automatically include the host in the url queried, and + will require an absolute path to the resource. + + """ + return self._session + + def make_url(self, path): + return self._server.make_url(path) + + @asyncio.coroutine + def request(self, method, path, *args, **kwargs): + """Routes a request to the http server. + + The interface is identical to asyncio.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + application. 
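+
+        e.g. (the path below is illustrative)::
+
+            resp = yield from client.request('GET', '/endpoint')
+            # the response is kept so TestClient.close() can release it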
+ + """ + resp = yield from self._session.request( + method, self.make_url(path), *args, **kwargs + ) + # save it to close later + self._responses.append(resp) + return resp + + def get(self, path, *args, **kwargs): + """Perform an HTTP GET request.""" + return self.request(hdrs.METH_GET, path, *args, **kwargs) + + def post(self, path, *args, **kwargs): + """Perform an HTTP POST request.""" + return self.request(hdrs.METH_POST, path, *args, **kwargs) + + def options(self, path, *args, **kwargs): + """Perform an HTTP OPTIONS request.""" + return self.request(hdrs.METH_OPTIONS, path, *args, **kwargs) + + def head(self, path, *args, **kwargs): + """Perform an HTTP HEAD request.""" + return self.request(hdrs.METH_HEAD, path, *args, **kwargs) + + def put(self, path, *args, **kwargs): + """Perform an HTTP PUT request.""" + return self.request(hdrs.METH_PUT, path, *args, **kwargs) + + def patch(self, path, *args, **kwargs): + """Perform an HTTP PATCH request.""" + return self.request(hdrs.METH_PATCH, path, *args, **kwargs) + + def delete(self, path, *args, **kwargs): + """Perform an HTTP PATCH request.""" + return self.request(hdrs.METH_DELETE, path, *args, **kwargs) + + def ws_connect(self, path, *args, **kwargs): + """Initiate websocket connection. + + The api is identical to aiohttp.ClientSession.ws_connect. + + """ + return self._session.ws_connect( + self.make_url(path), *args, **kwargs + ) + + @asyncio.coroutine + def close(self): + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run on exit when used as a(n) (asynchronous) + context manager. + + """ + if not self._closed: + for resp in self._responses: + resp.close() + yield from self._session.close() + yield from self._server.close() + self._closed = True + + def __enter__(self): + self._loop.run_until_complete(self.start_server()) + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._loop.run_until_complete(self.close()) + + if PY_35: + @asyncio.coroutine + def __aenter__(self): + yield from self.start_server() + return self + + @asyncio.coroutine + def __aexit__(self, exc_type, exc_value, traceback): + yield from self.close() + + +class AioHTTPTestCase(unittest.TestCase): + """A base class to allow for unittest web applications using + aiohttp. + + Provides the following: + + * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. + * self.loop (asyncio.BaseEventLoop): the event loop in which the + application and server are running. + * self.app (aiohttp.web.Application): the application returned by + self.get_app() + + Note that the TestClient's methods are asynchronous: you have to + execute function on the test client using asynchronous methods. + """ + + def get_app(self, loop): + """ + This method should be overridden + to return the aiohttp.web.Application + object to test. + + :param loop: the event_loop to use + :type loop: asyncio.BaseEventLoop + """ + pass # pragma: no cover + + def setUp(self): + self.loop = setup_test_loop() + self.app = self.get_app(self.loop) + self.client = TestClient(self.app) + self.loop.run_until_complete(self.client.start_server()) + + def tearDown(self): + self.loop.run_until_complete(self.client.close()) + teardown_test_loop(self.loop) + + +def unittest_run_loop(func): + """A decorator dedicated to use with asynchronous methods of an + AioHTTPTestCase. 
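+
+    e.g. (the test method name is illustrative)::
+
+        @unittest_run_loop
+        @asyncio.coroutine
+        def test_example(self):
+            resp = yield from self.client.request('GET', '/')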
+ + Handles executing an asynchronous function, using + the self.loop of the AioHTTPTestCase. + """ + + @functools.wraps(func) + def new_func(self): + return self.loop.run_until_complete(func(self)) + + return new_func + + +@contextlib.contextmanager +def loop_context(loop_factory=asyncio.new_event_loop): + """A contextmanager that creates an event_loop, for test purposes. + + Handles the creation and cleanup of a test loop. + """ + loop = setup_test_loop(loop_factory) + yield loop + teardown_test_loop(loop) + + +def setup_test_loop(loop_factory=asyncio.new_event_loop): + """Create and return an asyncio.BaseEventLoop + instance. + + The caller should also call teardown_test_loop, + once they are done with the loop. + """ + loop = loop_factory() + asyncio.set_event_loop(None) + return loop + + +def teardown_test_loop(loop): + """Teardown and cleanup an event_loop created + by setup_test_loop. + + :param loop: the loop to teardown + :type loop: asyncio.BaseEventLoop + """ + closed = loop.is_closed() + if not closed: + loop.call_soon(loop.stop) + loop.run_forever() + loop.close() + gc.collect() + asyncio.set_event_loop(None) + + +def _create_app_mock(): + app = mock.Mock() + app._debug = False + app.on_response_prepare = Signal(app) + return app + + +def _create_transport(sslcontext=None): + transport = mock.Mock() + + def get_extra_info(key): + if key == 'sslcontext': + return sslcontext + else: + return None + + transport.get_extra_info.side_effect = get_extra_info + return transport + + +def make_mocked_request(method, path, headers=None, *, + version=HttpVersion(1, 1), closing=False, + app=None, + reader=sentinel, + writer=sentinel, + transport=sentinel, + payload=sentinel, + sslcontext=None, + secure_proxy_ssl_header=None): + """Creates mocked web.Request testing purposes. + + Useful in unit tests, when spinning full web server is overkill or + specific conditions and errors are hard to trigger. + + :param method: str, that represents HTTP method, like; GET, POST. + :type method: str + + :param path: str, The URL including *PATH INFO* without the host or scheme + :type path: str + + :param headers: mapping containing the headers. Can be anything accepted + by the multidict.CIMultiDict constructor. + :type headers: dict, multidict.CIMultiDict, list of pairs + + :param version: namedtuple with encoded HTTP version + :type version: aiohttp.protocol.HttpVersion + + :param closing: flag indicates that connection should be closed after + response. + :type closing: bool + + :param app: the aiohttp.web application attached for fake request + :type app: aiohttp.web.Application + + :param reader: object for storing and managing incoming data + :type reader: aiohttp.parsers.StreamParser + + :param writer: object for managing outcoming data + :type wirter: aiohttp.parsers.StreamWriter + + :param transport: asyncio transport instance + :type transport: asyncio.transports.Transport + + :param payload: raw payload reader object + :type payload: aiohttp.streams.FlowControlStreamReader + + :param sslcontext: ssl.SSLContext object, for HTTPS connection + :type sslcontext: ssl.SSLContext + + :param secure_proxy_ssl_header: A tuple representing a HTTP header/value + combination that signifies a request is secure. 
+ :type secure_proxy_ssl_header: tuple + + """ + + if version < HttpVersion(1, 1): + closing = True + + if headers: + hdrs = CIMultiDict(headers) + raw_hdrs = [ + (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items()] + else: + hdrs = CIMultiDict() + raw_hdrs = [] + + message = RawRequestMessage(method, path, version, hdrs, + raw_hdrs, closing, False) + if app is None: + app = _create_app_mock() + + if reader is sentinel: + reader = mock.Mock() + + if writer is sentinel: + writer = mock.Mock() + + if transport is sentinel: + transport = _create_transport(sslcontext) + + if payload is sentinel: + payload = mock.Mock() + + req = Request(app, message, payload, + transport, reader, writer, + secure_proxy_ssl_header=secure_proxy_ssl_header) + + return req + + +def make_mocked_coro(return_value=sentinel, raise_exception=sentinel): + """Creates a coroutine mock.""" + @asyncio.coroutine + def mock_coro(*args, **kwargs): + if raise_exception is not sentinel: + raise raise_exception + return return_value + + return mock.Mock(wraps=mock_coro) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/web.py b/RBXLegacyDiscordBot/lib/aiohttp/web.py new file mode 100644 index 0000000..f06535c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/web.py @@ -0,0 +1,376 @@ +import asyncio +import sys +import warnings +from argparse import ArgumentParser +from importlib import import_module + +from . import hdrs, web_exceptions, web_reqrep, web_urldispatcher, web_ws +from .abc import AbstractMatchInfo, AbstractRouter +from .helpers import sentinel +from .log import web_logger +from .protocol import HttpVersion # noqa +from .server import ServerHttpProtocol +from .signals import PostSignal, PreSignal, Signal +from .web_exceptions import * # noqa +from .web_reqrep import * # noqa +from .web_urldispatcher import * # noqa +from .web_ws import * # noqa + + +__all__ = (web_reqrep.__all__ + + web_exceptions.__all__ + + web_urldispatcher.__all__ + + web_ws.__all__ + + ('Application', 'RequestHandler', + 'RequestHandlerFactory', 'HttpVersion', + 'MsgType')) + + +class RequestHandler(ServerHttpProtocol): + + _meth = 'none' + _path = 'none' + + def __init__(self, manager, app, router, *, + secure_proxy_ssl_header=None, **kwargs): + super().__init__(**kwargs) + + self._manager = manager + self._app = app + self._router = router + self._middlewares = app.middlewares + self._secure_proxy_ssl_header = secure_proxy_ssl_header + + def __repr__(self): + return "<{} {}:{} {}>".format( + self.__class__.__name__, self._meth, self._path, + 'connected' if self.transport is not None else 'disconnected') + + def connection_made(self, transport): + super().connection_made(transport) + + self._manager.connection_made(self, transport) + + def connection_lost(self, exc): + self._manager.connection_lost(self, exc) + + super().connection_lost(exc) + + @asyncio.coroutine + def handle_request(self, message, payload): + self._manager._requests_count += 1 + if self.access_log: + now = self._loop.time() + + app = self._app + request = web_reqrep.Request( + app, message, payload, + self.transport, self.reader, self.writer, + secure_proxy_ssl_header=self._secure_proxy_ssl_header) + self._meth = request.method + self._path = request.path + try: + match_info = yield from self._router.resolve(request) + + assert isinstance(match_info, AbstractMatchInfo), match_info + + resp = None + request._match_info = match_info + expect = request.headers.get(hdrs.EXPECT) + if expect: + resp = ( + yield from match_info.expect_handler(request)) + + if resp is 
None: + handler = match_info.handler + for factory in reversed(self._middlewares): + handler = yield from factory(app, handler) + resp = yield from handler(request) + + assert isinstance(resp, web_reqrep.StreamResponse), \ + ("Handler {!r} should return response instance, " + "got {!r} [middlewares {!r}]").format( + match_info.handler, type(resp), self._middlewares) + except web_exceptions.HTTPException as exc: + resp = exc + + resp_msg = yield from resp.prepare(request) + yield from resp.write_eof() + + # notify server about keep-alive + self.keep_alive(resp.keep_alive) + + # log access + if self.access_log: + self.log_access(message, None, resp_msg, self._loop.time() - now) + + # for repr + self._meth = 'none' + self._path = 'none' + + +class RequestHandlerFactory: + + def __init__(self, app, router, *, + handler=RequestHandler, loop=None, + secure_proxy_ssl_header=None, **kwargs): + self._app = app + self._router = router + self._handler = handler + self._loop = loop + self._connections = {} + self._secure_proxy_ssl_header = secure_proxy_ssl_header + self._kwargs = kwargs + self._kwargs.setdefault('logger', app.logger) + self._requests_count = 0 + + @property + def requests_count(self): + """Number of processed requests.""" + return self._requests_count + + @property + def secure_proxy_ssl_header(self): + return self._secure_proxy_ssl_header + + @property + def connections(self): + return list(self._connections.keys()) + + def connection_made(self, handler, transport): + self._connections[handler] = transport + + def connection_lost(self, handler, exc=None): + if handler in self._connections: + del self._connections[handler] + + @asyncio.coroutine + def finish_connections(self, timeout=None): + coros = [conn.shutdown(timeout) for conn in self._connections] + yield from asyncio.gather(*coros, loop=self._loop) + self._connections.clear() + + def __call__(self): + return self._handler( + self, self._app, self._router, loop=self._loop, + secure_proxy_ssl_header=self._secure_proxy_ssl_header, + **self._kwargs) + + +class Application(dict): + + def __init__(self, *, logger=web_logger, loop=None, + router=None, handler_factory=RequestHandlerFactory, + middlewares=(), debug=False): + if loop is None: + loop = asyncio.get_event_loop() + if router is None: + router = web_urldispatcher.UrlDispatcher() + assert isinstance(router, AbstractRouter), router + + self._debug = debug + self._router = router + self._handler_factory = handler_factory + self._loop = loop + self.logger = logger + + self._middlewares = list(middlewares) + + self._on_pre_signal = PreSignal() + self._on_post_signal = PostSignal() + self._on_response_prepare = Signal(self) + self._on_startup = Signal(self) + self._on_shutdown = Signal(self) + self._on_cleanup = Signal(self) + + @property + def debug(self): + return self._debug + + @property + def on_response_prepare(self): + return self._on_response_prepare + + @property + def on_pre_signal(self): + return self._on_pre_signal + + @property + def on_post_signal(self): + return self._on_post_signal + + @property + def on_startup(self): + return self._on_startup + + @property + def on_shutdown(self): + return self._on_shutdown + + @property + def on_cleanup(self): + return self._on_cleanup + + @property + def router(self): + return self._router + + @property + def loop(self): + return self._loop + + @property + def middlewares(self): + return self._middlewares + + def make_handler(self, **kwargs): + debug = kwargs.pop('debug', sentinel) + if debug is not sentinel: + warnings.warn( + 
"`debug` parameter is deprecated. " + "Use Application's debug mode instead", DeprecationWarning) + if debug != self.debug: + raise ValueError( + "The value of `debug` parameter conflicts with the debug " + "settings of the `Application` instance. The " + "application's debug mode setting should be used instead " + "as a single point to setup a debug mode. For more " + "information please check " + "http://aiohttp.readthedocs.io/en/stable/" + "web_reference.html#aiohttp.web.Application" + ) + return self._handler_factory(self, self.router, debug=self.debug, + loop=self.loop, **kwargs) + + @asyncio.coroutine + def startup(self): + """Causes on_startup signal + + Should be called in the event loop along with the request handler. + """ + yield from self.on_startup.send(self) + + @asyncio.coroutine + def shutdown(self): + """Causes on_shutdown signal + + Should be called before cleanup() + """ + yield from self.on_shutdown.send(self) + + @asyncio.coroutine + def cleanup(self): + """Causes on_cleanup signal + + Should be called after shutdown() + """ + yield from self.on_cleanup.send(self) + + @asyncio.coroutine + def finish(self): + """Finalize an application. + + Deprecated alias for .cleanup() + """ + warnings.warn("Use .cleanup() instead", DeprecationWarning) + yield from self.cleanup() + + def register_on_finish(self, func, *args, **kwargs): + warnings.warn("Use .on_cleanup.append() instead", DeprecationWarning) + self.on_cleanup.append(lambda app: func(app, *args, **kwargs)) + + def copy(self): + raise NotImplementedError + + def __call__(self): + """gunicorn compatibility""" + return self + + def __repr__(self): + return "" + + +def run_app(app, *, host='0.0.0.0', port=None, + shutdown_timeout=60.0, ssl_context=None, + print=print, backlog=128): + """Run an app locally""" + if port is None: + if not ssl_context: + port = 8080 + else: + port = 8443 + + loop = app.loop + + handler = app.make_handler() + server = loop.create_server(handler, host, port, ssl=ssl_context, + backlog=backlog) + srv, startup_res = loop.run_until_complete(asyncio.gather(server, + app.startup(), + loop=loop)) + + scheme = 'https' if ssl_context else 'http' + print("======== Running on {scheme}://{host}:{port}/ ========\n" + "(Press CTRL+C to quit)".format( + scheme=scheme, host=host, port=port)) + + try: + loop.run_forever() + except KeyboardInterrupt: # pragma: no cover + pass + finally: + srv.close() + loop.run_until_complete(srv.wait_closed()) + loop.run_until_complete(app.shutdown()) + loop.run_until_complete(handler.finish_connections(shutdown_timeout)) + loop.run_until_complete(app.cleanup()) + loop.close() + + +def main(argv): + arg_parser = ArgumentParser( + description="aiohttp.web Application server", + prog="aiohttp.web" + ) + arg_parser.add_argument( + "entry_func", + help=("Callable returning the `aiohttp.web.Application` instance to " + "run. 
Should be specified in the 'module:function' syntax."), + metavar="entry-func" + ) + arg_parser.add_argument( + "-H", "--hostname", + help="TCP/IP hostname to serve on (default: %(default)r)", + default="localhost" + ) + arg_parser.add_argument( + "-P", "--port", + help="TCP/IP port to serve on (default: %(default)r)", + type=int, + default="8080" + ) + args, extra_argv = arg_parser.parse_known_args(argv) + + # Import logic + mod_str, _, func_str = args.entry_func.partition(":") + if not func_str or not mod_str: + arg_parser.error( + "'entry-func' not in 'module:function' syntax" + ) + if mod_str.startswith("."): + arg_parser.error("relative module names not supported") + try: + module = import_module(mod_str) + except ImportError: + arg_parser.error("module %r not found" % mod_str) + try: + func = getattr(module, func_str) + except AttributeError: + arg_parser.error("module %r has no attribute %r" % (mod_str, func_str)) + + app = func(extra_argv) + run_app(app, host=args.hostname, port=args.port) + arg_parser.exit(message="Stopped\n") + +if __name__ == "__main__": # pragma: no branch + main(sys.argv[1:]) # pragma: no cover diff --git a/RBXLegacyDiscordBot/lib/aiohttp/web_exceptions.py b/RBXLegacyDiscordBot/lib/aiohttp/web_exceptions.py new file mode 100644 index 0000000..b886697 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/web_exceptions.py @@ -0,0 +1,349 @@ +from .web_reqrep import Response + +__all__ = ( + 'HTTPException', + 'HTTPError', + 'HTTPRedirection', + 'HTTPSuccessful', + 'HTTPOk', + 'HTTPCreated', + 'HTTPAccepted', + 'HTTPNonAuthoritativeInformation', + 'HTTPNoContent', + 'HTTPResetContent', + 'HTTPPartialContent', + 'HTTPMultipleChoices', + 'HTTPMovedPermanently', + 'HTTPFound', + 'HTTPSeeOther', + 'HTTPNotModified', + 'HTTPUseProxy', + 'HTTPTemporaryRedirect', + 'HTTPPermanentRedirect', + 'HTTPClientError', + 'HTTPBadRequest', + 'HTTPUnauthorized', + 'HTTPPaymentRequired', + 'HTTPForbidden', + 'HTTPNotFound', + 'HTTPMethodNotAllowed', + 'HTTPNotAcceptable', + 'HTTPProxyAuthenticationRequired', + 'HTTPRequestTimeout', + 'HTTPConflict', + 'HTTPGone', + 'HTTPLengthRequired', + 'HTTPPreconditionFailed', + 'HTTPRequestEntityTooLarge', + 'HTTPRequestURITooLong', + 'HTTPUnsupportedMediaType', + 'HTTPRequestRangeNotSatisfiable', + 'HTTPExpectationFailed', + 'HTTPMisdirectedRequest', + 'HTTPUpgradeRequired', + 'HTTPPreconditionRequired', + 'HTTPTooManyRequests', + 'HTTPRequestHeaderFieldsTooLarge', + 'HTTPUnavailableForLegalReasons', + 'HTTPServerError', + 'HTTPInternalServerError', + 'HTTPNotImplemented', + 'HTTPBadGateway', + 'HTTPServiceUnavailable', + 'HTTPGatewayTimeout', + 'HTTPVersionNotSupported', + 'HTTPVariantAlsoNegotiates', + 'HTTPNotExtended', + 'HTTPNetworkAuthenticationRequired', +) + + +############################################################ +# HTTP Exceptions +############################################################ + +class HTTPException(Response, Exception): + + # You should set in subclasses: + # status = 200 + + status_code = None + empty_body = False + + def __init__(self, *, headers=None, reason=None, + body=None, text=None, content_type=None): + Response.__init__(self, status=self.status_code, + headers=headers, reason=reason, + body=body, text=text, content_type=content_type) + Exception.__init__(self, self.reason) + if self.body is None and not self.empty_body: + self.text = "{}: {}".format(self.status, self.reason) + + +class HTTPError(HTTPException): + """Base class for exceptions with status codes in the 400s and 500s.""" + + +class 
HTTPRedirection(HTTPException): + """Base class for exceptions with status codes in the 300s.""" + + +class HTTPSuccessful(HTTPException): + """Base class for exceptions with status codes in the 200s.""" + + +class HTTPOk(HTTPSuccessful): + status_code = 200 + + +class HTTPCreated(HTTPSuccessful): + status_code = 201 + + +class HTTPAccepted(HTTPSuccessful): + status_code = 202 + + +class HTTPNonAuthoritativeInformation(HTTPSuccessful): + status_code = 203 + + +class HTTPNoContent(HTTPSuccessful): + status_code = 204 + empty_body = True + + +class HTTPResetContent(HTTPSuccessful): + status_code = 205 + empty_body = True + + +class HTTPPartialContent(HTTPSuccessful): + status_code = 206 + + +############################################################ +# 3xx redirection +############################################################ + + +class _HTTPMove(HTTPRedirection): + + def __init__(self, location, *, headers=None, reason=None, + body=None, text=None, content_type=None): + if not location: + raise ValueError("HTTP redirects need a location to redirect to.") + super().__init__(headers=headers, reason=reason, + body=body, text=text, content_type=content_type) + self.headers['Location'] = location + self.location = location + + +class HTTPMultipleChoices(_HTTPMove): + status_code = 300 + + +class HTTPMovedPermanently(_HTTPMove): + status_code = 301 + + +class HTTPFound(_HTTPMove): + status_code = 302 + + +# This one is safe after a POST (the redirected location will be +# retrieved with GET): +class HTTPSeeOther(_HTTPMove): + status_code = 303 + + +class HTTPNotModified(HTTPRedirection): + # FIXME: this should include a date or etag header + status_code = 304 + empty_body = True + + +class HTTPUseProxy(_HTTPMove): + # Not a move, but looks a little like one + status_code = 305 + + +class HTTPTemporaryRedirect(_HTTPMove): + status_code = 307 + + +class HTTPPermanentRedirect(_HTTPMove): + status_code = 308 + + +############################################################ +# 4xx client error +############################################################ + + +class HTTPClientError(HTTPError): + pass + + +class HTTPBadRequest(HTTPClientError): + status_code = 400 + + +class HTTPUnauthorized(HTTPClientError): + status_code = 401 + + +class HTTPPaymentRequired(HTTPClientError): + status_code = 402 + + +class HTTPForbidden(HTTPClientError): + status_code = 403 + + +class HTTPNotFound(HTTPClientError): + status_code = 404 + + +class HTTPMethodNotAllowed(HTTPClientError): + status_code = 405 + + def __init__(self, method, allowed_methods, *, headers=None, reason=None, + body=None, text=None, content_type=None): + allow = ','.join(sorted(allowed_methods)) + super().__init__(headers=headers, reason=reason, + body=body, text=text, content_type=content_type) + self.headers['Allow'] = allow + self.allowed_methods = allowed_methods + self.method = method.upper() + + +class HTTPNotAcceptable(HTTPClientError): + status_code = 406 + + +class HTTPProxyAuthenticationRequired(HTTPClientError): + status_code = 407 + + +class HTTPRequestTimeout(HTTPClientError): + status_code = 408 + + +class HTTPConflict(HTTPClientError): + status_code = 409 + + +class HTTPGone(HTTPClientError): + status_code = 410 + + +class HTTPLengthRequired(HTTPClientError): + status_code = 411 + + +class HTTPPreconditionFailed(HTTPClientError): + status_code = 412 + + +class HTTPRequestEntityTooLarge(HTTPClientError): + status_code = 413 + + +class HTTPRequestURITooLong(HTTPClientError): + status_code = 414 + + +class 
HTTPUnsupportedMediaType(HTTPClientError): + status_code = 415 + + +class HTTPRequestRangeNotSatisfiable(HTTPClientError): + status_code = 416 + + +class HTTPExpectationFailed(HTTPClientError): + status_code = 417 + + +class HTTPMisdirectedRequest(HTTPClientError): + status_code = 421 + + +class HTTPUpgradeRequired(HTTPClientError): + status_code = 426 + + +class HTTPPreconditionRequired(HTTPClientError): + status_code = 428 + + +class HTTPTooManyRequests(HTTPClientError): + status_code = 429 + + +class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): + status_code = 431 + + +class HTTPUnavailableForLegalReasons(HTTPClientError): + status_code = 451 + + def __init__(self, link, *, headers=None, reason=None, + body=None, text=None, content_type=None): + super().__init__(headers=headers, reason=reason, + body=body, text=text, content_type=content_type) + self.headers['Link'] = '<%s>; rel="blocked-by"' % link + self.link = link + + +############################################################ +# 5xx Server Error +############################################################ +# Response status codes beginning with the digit "5" indicate cases in +# which the server is aware that it has erred or is incapable of +# performing the request. Except when responding to a HEAD request, the +# server SHOULD include an entity containing an explanation of the error +# situation, and whether it is a temporary or permanent condition. User +# agents SHOULD display any included entity to the user. These response +# codes are applicable to any request method. + + +class HTTPServerError(HTTPError): + pass + + +class HTTPInternalServerError(HTTPServerError): + status_code = 500 + + +class HTTPNotImplemented(HTTPServerError): + status_code = 501 + + +class HTTPBadGateway(HTTPServerError): + status_code = 502 + + +class HTTPServiceUnavailable(HTTPServerError): + status_code = 503 + + +class HTTPGatewayTimeout(HTTPServerError): + status_code = 504 + + +class HTTPVersionNotSupported(HTTPServerError): + status_code = 505 + + +class HTTPVariantAlsoNegotiates(HTTPServerError): + status_code = 506 + + +class HTTPNotExtended(HTTPServerError): + status_code = 510 + + +class HTTPNetworkAuthenticationRequired(HTTPServerError): + status_code = 511 diff --git a/RBXLegacyDiscordBot/lib/aiohttp/web_reqrep.py b/RBXLegacyDiscordBot/lib/aiohttp/web_reqrep.py new file mode 100644 index 0000000..ba24a40 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/web_reqrep.py @@ -0,0 +1,895 @@ +import asyncio +import binascii +import cgi +import collections +import datetime +import enum +import http.cookies +import io +import json +import math +import time +import warnings +from email.utils import parsedate +from types import MappingProxyType +from urllib.parse import parse_qsl, unquote, urlsplit + +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy + +from . 
import hdrs, multipart +from .helpers import reify, sentinel +from .protocol import Response as ResponseImpl +from .protocol import HttpVersion10, HttpVersion11 +from .streams import EOF_MARKER + +__all__ = ( + 'ContentCoding', 'Request', 'StreamResponse', 'Response', + 'json_response' +) + + +class HeadersMixin: + + _content_type = None + _content_dict = None + _stored_content_type = sentinel + + def _parse_content_type(self, raw): + self._stored_content_type = raw + if raw is None: + # default value according to RFC 2616 + self._content_type = 'application/octet-stream' + self._content_dict = {} + else: + self._content_type, self._content_dict = cgi.parse_header(raw) + + @property + def content_type(self, _CONTENT_TYPE=hdrs.CONTENT_TYPE): + """The value of content part for Content-Type HTTP header.""" + raw = self.headers.get(_CONTENT_TYPE) + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_type + + @property + def charset(self, _CONTENT_TYPE=hdrs.CONTENT_TYPE): + """The value of charset part for Content-Type HTTP header.""" + raw = self.headers.get(_CONTENT_TYPE) + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_dict.get('charset') + + @property + def content_length(self, _CONTENT_LENGTH=hdrs.CONTENT_LENGTH): + """The value of Content-Length HTTP header.""" + l = self.headers.get(_CONTENT_LENGTH) + if l is None: + return None + else: + return int(l) + +FileField = collections.namedtuple('Field', 'name filename file content_type') + + +class ContentCoding(enum.Enum): + # The content codings that we have support for. + # + # Additional registered codings are listed at: + # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding + deflate = 'deflate' + gzip = 'gzip' + identity = 'identity' + + +############################################################ +# HTTP Request +############################################################ + + +class Request(dict, HeadersMixin): + + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT, + hdrs.METH_TRACE, hdrs.METH_DELETE} + + def __init__(self, app, message, payload, transport, reader, writer, *, + secure_proxy_ssl_header=None): + self._app = app + self._message = message + self._transport = transport + self._reader = reader + self._writer = writer + self._post = None + self._post_files_cache = None + + # matchdict, route_name, handler + # or information about traversal lookup + self._match_info = None # initialized after route resolving + + self._payload = payload + + self._read_bytes = None + self._has_body = not payload.at_eof() + + self._secure_proxy_ssl_header = secure_proxy_ssl_header + + @reify + def scheme(self): + """A string representing the scheme of the request. + + 'http' or 'https'. + """ + if self._transport.get_extra_info('sslcontext'): + return 'https' + secure_proxy_ssl_header = self._secure_proxy_ssl_header + if secure_proxy_ssl_header is not None: + header, value = secure_proxy_ssl_header + if self.headers.get(header) == value: + return 'https' + return 'http' + + @reify + def method(self): + """Read only property for getting HTTP method. + + The value is upper-cased str like 'GET', 'POST', 'PUT' etc. + """ + return self._message.method + + @reify + def version(self): + """Read only property for getting HTTP version of request. + + Returns aiohttp.protocol.HttpVersion instance. + """ + return self._message.version + + @reify + def host(self): + """Read only property for getting *HOST* header of request. 
+ + Returns str or None if HTTP request has no HOST header. + """ + return self._message.headers.get(hdrs.HOST) + + @reify + def path_qs(self): + """The URL including PATH_INFO and the query string. + + E.g, /app/blog?id=10 + """ + return self._message.path + + @reify + def _splitted_path(self): + url = '{}://{}{}'.format(self.scheme, self.host, self.path_qs) + return urlsplit(url) + + @reify + def raw_path(self): + """ The URL including raw *PATH INFO* without the host or scheme. + Warning, the path is unquoted and may contains non valid URL characters + + E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` + """ + return self._splitted_path.path + + @reify + def path(self): + """The URL including *PATH INFO* without the host or scheme. + + E.g., ``/app/blog`` + """ + return unquote(self.raw_path) + + @reify + def query_string(self): + """The query string in the URL. + + E.g., id=10 + """ + return self._splitted_path.query + + @reify + def GET(self): + """A multidict with all the variables in the query string. + + Lazy property. + """ + return MultiDictProxy(MultiDict(parse_qsl(self.query_string, + keep_blank_values=True))) + + @reify + def POST(self): + """A multidict with all the variables in the POST parameters. + + post() methods has to be called before using this attribute. + """ + if self._post is None: + raise RuntimeError("POST is not available before post()") + return self._post + + @reify + def headers(self): + """A case-insensitive multidict proxy with all headers.""" + return CIMultiDictProxy(self._message.headers) + + @reify + def raw_headers(self): + """A sequence of pars for all headers.""" + return tuple(self._message.raw_headers) + + @reify + def if_modified_since(self, _IF_MODIFIED_SINCE=hdrs.IF_MODIFIED_SINCE): + """The value of If-Modified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + httpdate = self.headers.get(_IF_MODIFIED_SINCE) + if httpdate is not None: + timetuple = parsedate(httpdate) + if timetuple is not None: + return datetime.datetime(*timetuple[:6], + tzinfo=datetime.timezone.utc) + return None + + @reify + def keep_alive(self): + """Is keepalive enabled by client?""" + if self.version < HttpVersion10: + return False + else: + return not self._message.should_close + + @property + def match_info(self): + """Result of route resolving.""" + return self._match_info + + @property + def app(self): + """Application instance.""" + return self._app + + @property + def transport(self): + """Transport used for request processing.""" + return self._transport + + @reify + def cookies(self): + """Return request cookies. + + A read-only dictionary-like object. + """ + raw = self.headers.get(hdrs.COOKIE, '') + parsed = http.cookies.SimpleCookie(raw) + return MappingProxyType( + {key: val.value for key, val in parsed.items()}) + + @property + def content(self): + """Return raw payload stream.""" + return self._payload + + @property + def has_body(self): + """Return True if request has HTTP BODY, False otherwise.""" + return self._has_body + + @asyncio.coroutine + def release(self): + """Release request. + + Eat unread part of HTTP BODY if present. + """ + chunk = yield from self._payload.readany() + while chunk is not EOF_MARKER or chunk: + chunk = yield from self._payload.readany() + + @asyncio.coroutine + def read(self): + """Read request body if present. + + Returns bytes object with full request content. 
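+
+        e.g.::
+
+            body = yield from request.read()
+            text = body.decode(request.charset or 'utf-8')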
+ """ + if self._read_bytes is None: + body = bytearray() + while True: + chunk = yield from self._payload.readany() + body.extend(chunk) + if chunk is EOF_MARKER: + break + self._read_bytes = bytes(body) + return self._read_bytes + + @asyncio.coroutine + def text(self): + """Return BODY as text using encoding from .charset.""" + bytes_body = yield from self.read() + encoding = self.charset or 'utf-8' + return bytes_body.decode(encoding) + + @asyncio.coroutine + def json(self, *, loads=json.loads, loader=None): + """Return BODY as JSON.""" + if loader is not None: + warnings.warn( + "Using loader argument is deprecated, use loads instead", + DeprecationWarning) + loads = loader + body = yield from self.text() + return loads(body) + + @asyncio.coroutine + def multipart(self, *, reader=multipart.MultipartReader): + """Return async iterator to process BODY as multipart.""" + return reader(self.headers, self.content) + + @asyncio.coroutine + def post(self): + """Return POST parameters.""" + if self._post is not None: + return self._post + if self.method not in self.POST_METHODS: + self._post = MultiDictProxy(MultiDict()) + return self._post + + content_type = self.content_type + if (content_type not in ('', + 'application/x-www-form-urlencoded', + 'multipart/form-data')): + self._post = MultiDictProxy(MultiDict()) + return self._post + + if self.content_type.startswith('multipart/'): + warnings.warn('To process multipart requests use .multipart' + ' coroutine instead.', DeprecationWarning) + + body = yield from self.read() + content_charset = self.charset or 'utf-8' + + environ = {'REQUEST_METHOD': self.method, + 'CONTENT_LENGTH': str(len(body)), + 'QUERY_STRING': '', + 'CONTENT_TYPE': self.headers.get(hdrs.CONTENT_TYPE)} + + fs = cgi.FieldStorage(fp=io.BytesIO(body), + environ=environ, + keep_blank_values=True, + encoding=content_charset) + + supported_transfer_encoding = { + 'base64': binascii.a2b_base64, + 'quoted-printable': binascii.a2b_qp + } + + out = MultiDict() + _count = 1 + for field in fs.list or (): + transfer_encoding = field.headers.get( + hdrs.CONTENT_TRANSFER_ENCODING, None) + if field.filename: + ff = FileField(field.name, + field.filename, + field.file, # N.B. 
file closed error + field.type) + if self._post_files_cache is None: + self._post_files_cache = {} + self._post_files_cache[field.name+str(_count)] = field + _count += 1 + out.add(field.name, ff) + else: + value = field.value + if transfer_encoding in supported_transfer_encoding: + # binascii accepts bytes + value = value.encode('utf-8') + value = supported_transfer_encoding[ + transfer_encoding](value) + out.add(field.name, value) + + self._post = MultiDictProxy(out) + return self._post + + def copy(self): + raise NotImplementedError + + def __repr__(self): + ascii_encodable_path = self.path.encode('ascii', 'backslashreplace') \ + .decode('ascii') + return "<{} {} {} >".format(self.__class__.__name__, + self.method, ascii_encodable_path) + + +############################################################ +# HTTP Response classes +############################################################ + + +class StreamResponse(HeadersMixin): + + def __init__(self, *, status=200, reason=None, headers=None): + self._body = None + self._keep_alive = None + self._chunked = False + self._chunk_size = None + self._compression = False + self._compression_force = False + self._headers = CIMultiDict() + self._cookies = http.cookies.SimpleCookie() + self.set_status(status, reason) + + self._req = None + self._resp_impl = None + self._eof_sent = False + self._tcp_nodelay = True + self._tcp_cork = False + + if headers is not None: + self._headers.extend(headers) + self._parse_content_type(self._headers.get(hdrs.CONTENT_TYPE)) + self._generate_content_type_header() + + def _copy_cookies(self): + for cookie in self._cookies.values(): + value = cookie.output(header='')[1:] + self.headers.add(hdrs.SET_COOKIE, value) + + @property + def prepared(self): + return self._resp_impl is not None + + @property + def started(self): + warnings.warn('use Response.prepared instead', DeprecationWarning) + return self.prepared + + @property + def status(self): + return self._status + + @property + def chunked(self): + return self._chunked + + @property + def compression(self): + return self._compression + + @property + def reason(self): + return self._reason + + def set_status(self, status, reason=None): + self._status = int(status) + if reason is None: + reason = ResponseImpl.calc_reason(status) + self._reason = reason + + @property + def keep_alive(self): + return self._keep_alive + + def force_close(self): + self._keep_alive = False + + def enable_chunked_encoding(self, chunk_size=None): + """Enables automatic chunked transfer encoding.""" + self._chunked = True + self._chunk_size = chunk_size + + def enable_compression(self, force=None): + """Enables response compression encoding.""" + # Backwards compatibility for when force was a bool <0.17. + if type(force) == bool: + force = ContentCoding.deflate if force else ContentCoding.identity + elif force is not None: + assert isinstance(force, ContentCoding), ("force should one of " + "None, bool or " + "ContentEncoding") + + self._compression = True + self._compression_force = force + + @property + def headers(self): + return self._headers + + @property + def cookies(self): + return self._cookies + + def set_cookie(self, name, value, *, expires=None, + domain=None, max_age=None, path='/', + secure=None, httponly=None, version=None): + """Set or update response cookie. + + Sets new cookie or updates existent with new value. + Also updates only those params which are not None. 
+ """ + + old = self._cookies.get(name) + if old is not None and old.coded_value == '': + # deleted cookie + self._cookies.pop(name, None) + + self._cookies[name] = value + c = self._cookies[name] + + if expires is not None: + c['expires'] = expires + elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT': + del c['expires'] + + if domain is not None: + c['domain'] = domain + + if max_age is not None: + c['max-age'] = max_age + elif 'max-age' in c: + del c['max-age'] + + c['path'] = path + + if secure is not None: + c['secure'] = secure + if httponly is not None: + c['httponly'] = httponly + if version is not None: + c['version'] = version + + def del_cookie(self, name, *, domain=None, path='/'): + """Delete cookie. + + Creates new empty expired cookie. + """ + # TODO: do we need domain/path here? + self._cookies.pop(name, None) + self.set_cookie(name, '', max_age=0, + expires="Thu, 01 Jan 1970 00:00:00 GMT", + domain=domain, path=path) + + @property + def content_length(self): + # Just a placeholder for adding setter + return super().content_length + + @content_length.setter + def content_length(self, value): + if value is not None: + value = int(value) + # TODO: raise error if chunked enabled + self.headers[hdrs.CONTENT_LENGTH] = str(value) + else: + self.headers.pop(hdrs.CONTENT_LENGTH, None) + + @property + def content_type(self): + # Just a placeholder for adding setter + return super().content_type + + @content_type.setter + def content_type(self, value): + self.content_type # read header values if needed + self._content_type = str(value) + self._generate_content_type_header() + + @property + def charset(self): + # Just a placeholder for adding setter + return super().charset + + @charset.setter + def charset(self, value): + ctype = self.content_type # read header values if needed + if ctype == 'application/octet-stream': + raise RuntimeError("Setting charset for application/octet-stream " + "doesn't make sense, setup content_type first") + if value is None: + self._content_dict.pop('charset', None) + else: + self._content_dict['charset'] = str(value).lower() + self._generate_content_type_header() + + @property + def last_modified(self, _LAST_MODIFIED=hdrs.LAST_MODIFIED): + """The value of Last-Modified HTTP header, or None. + + This header is represented as a `datetime` object. 
+ """ + httpdate = self.headers.get(_LAST_MODIFIED) + if httpdate is not None: + timetuple = parsedate(httpdate) + if timetuple is not None: + return datetime.datetime(*timetuple[:6], + tzinfo=datetime.timezone.utc) + return None + + @last_modified.setter + def last_modified(self, value): + if value is None: + self.headers.pop(hdrs.LAST_MODIFIED, None) + elif isinstance(value, (int, float)): + self.headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))) + elif isinstance(value, datetime.datetime): + self.headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()) + elif isinstance(value, str): + self.headers[hdrs.LAST_MODIFIED] = value + + @property + def tcp_nodelay(self): + return self._tcp_nodelay + + def set_tcp_nodelay(self, value): + value = bool(value) + self._tcp_nodelay = value + if value: + self._tcp_cork = False + if self._resp_impl is None: + return + if value: + self._resp_impl.transport.set_tcp_cork(False) + self._resp_impl.transport.set_tcp_nodelay(value) + + @property + def tcp_cork(self): + return self._tcp_cork + + def set_tcp_cork(self, value): + value = bool(value) + self._tcp_cork = value + if value: + self._tcp_nodelay = False + if self._resp_impl is None: + return + if value: + self._resp_impl.transport.set_tcp_nodelay(False) + self._resp_impl.transport.set_tcp_cork(value) + + def _generate_content_type_header(self, CONTENT_TYPE=hdrs.CONTENT_TYPE): + params = '; '.join("%s=%s" % i for i in self._content_dict.items()) + if params: + ctype = self._content_type + '; ' + params + else: + ctype = self._content_type + self.headers[CONTENT_TYPE] = ctype + + def _start_pre_check(self, request): + if self._resp_impl is not None: + if self._req is not request: + raise RuntimeError( + "Response has been started with different request.") + else: + return self._resp_impl + else: + return None + + def _do_start_compression(self, coding): + if coding != ContentCoding.identity: + self.headers[hdrs.CONTENT_ENCODING] = coding.value + self._resp_impl.add_compression_filter(coding.value) + self.content_length = None + + def _start_compression(self, request): + if self._compression_force: + self._do_start_compression(self._compression_force) + else: + accept_encoding = request.headers.get( + hdrs.ACCEPT_ENCODING, '').lower() + for coding in ContentCoding: + if coding.value in accept_encoding: + self._do_start_compression(coding) + return + + def start(self, request): + warnings.warn('use .prepare(request) instead', DeprecationWarning) + resp_impl = self._start_pre_check(request) + if resp_impl is not None: + return resp_impl + + return self._start(request) + + @asyncio.coroutine + def prepare(self, request): + resp_impl = self._start_pre_check(request) + if resp_impl is not None: + return resp_impl + yield from request.app.on_response_prepare.send(request, self) + + return self._start(request) + + def _start(self, request): + self._req = request + keep_alive = self._keep_alive + if keep_alive is None: + keep_alive = request.keep_alive + self._keep_alive = keep_alive + + resp_impl = self._resp_impl = ResponseImpl( + request._writer, + self._status, + request.version, + not keep_alive, + self._reason) + + self._copy_cookies() + + if self._compression: + self._start_compression(request) + + if self._chunked: + if request.version != HttpVersion11: + raise RuntimeError("Using chunked encoding is forbidden " + "for HTTP/{0.major}.{0.minor}".format( + request.version)) + resp_impl.enable_chunked_encoding() 
+ if self._chunk_size: + resp_impl.add_chunking_filter(self._chunk_size) + + headers = self.headers.items() + for key, val in headers: + resp_impl.add_header(key, val) + + resp_impl.transport.set_tcp_nodelay(self._tcp_nodelay) + resp_impl.transport.set_tcp_cork(self._tcp_cork) + self._send_headers(resp_impl) + return resp_impl + + def _send_headers(self, resp_impl): + # Durty hack required for + # https://github.com/KeepSafe/aiohttp/issues/1093 + # File sender may override it + resp_impl.send_headers() + + def write(self, data): + assert isinstance(data, (bytes, bytearray, memoryview)), \ + "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + raise RuntimeError("Cannot call write() after write_eof()") + if self._resp_impl is None: + raise RuntimeError("Cannot call write() before start()") + + if data: + return self._resp_impl.write(data) + else: + return () + + @asyncio.coroutine + def drain(self): + if self._resp_impl is None: + raise RuntimeError("Response has not been started") + yield from self._resp_impl.transport.drain() + + @asyncio.coroutine + def write_eof(self): + if self._eof_sent: + return + if self._resp_impl is None: + raise RuntimeError("Response has not been started") + + yield from self._resp_impl.write_eof() + self._eof_sent = True + + def __repr__(self): + if self.started: + info = "{} {} ".format(self._req.method, self._req.path) + else: + info = "not started" + return "<{} {} {}>".format(self.__class__.__name__, + self.reason, info) + + +class Response(StreamResponse): + + def __init__(self, *, body=None, status=200, + reason=None, text=None, headers=None, content_type=None, + charset=None): + if body is not None and text is not None: + raise ValueError("body and text are not allowed together") + + if headers is None: + headers = CIMultiDict() + elif not isinstance(headers, (CIMultiDict, CIMultiDictProxy)): + headers = CIMultiDict(headers) + + if content_type is not None and ";" in content_type: + raise ValueError("charset must not be in content_type " + "argument") + + if text is not None: + if hdrs.CONTENT_TYPE in headers: + if content_type or charset: + raise ValueError("passing both Content-Type header and " + "content_type or charset params " + "is forbidden") + else: + # fast path for filling headers + if not isinstance(text, str): + raise TypeError("text argument must be str (%r)" % + type(text)) + if content_type is None: + content_type = 'text/plain' + if charset is None: + charset = 'utf-8' + headers[hdrs.CONTENT_TYPE] = ( + content_type + '; charset=' + charset) + body = text.encode(charset) + text = None + else: + if hdrs.CONTENT_TYPE in headers: + if content_type is not None or charset is not None: + raise ValueError("passing both Content-Type header and " + "content_type or charset params " + "is forbidden") + else: + if content_type is not None: + if charset is not None: + content_type += '; charset=' + charset + headers[hdrs.CONTENT_TYPE] = content_type + + super().__init__(status=status, reason=reason, headers=headers) + self.set_tcp_cork(True) + if text is not None: + self.text = text + else: + self.body = body + + @property + def body(self): + return self._body + + @body.setter + def body(self, body): + if body is not None and not isinstance(body, bytes): + raise TypeError("body argument must be bytes (%r)" % type(body)) + self._body = body + if body is not None: + self.content_length = len(body) + else: + self.content_length = 0 + + @property + def text(self): + if self._body is None: + return None + return 
self._body.decode(self.charset or 'utf-8') + + @text.setter + def text(self, text): + if text is not None and not isinstance(text, str): + raise TypeError("text argument must be str (%r)" % type(text)) + + if self.content_type == 'application/octet-stream': + self.content_type = 'text/plain' + if self.charset is None: + self.charset = 'utf-8' + + self.body = text.encode(self.charset) + + @asyncio.coroutine + def write_eof(self): + try: + body = self._body + if (body is not None and + self._req.method != hdrs.METH_HEAD and + self._status not in [204, 304]): + self.write(body) + finally: + self.set_tcp_nodelay(True) + yield from super().write_eof() + + +def json_response(data=sentinel, *, text=None, body=None, status=200, + reason=None, headers=None, content_type='application/json', + dumps=json.dumps): + if data is not sentinel: + if text or body: + raise ValueError( + "only one of data, text, or body should be specified" + ) + else: + text = dumps(data) + return Response(text=text, body=body, status=status, reason=reason, + headers=headers, content_type=content_type) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/web_urldispatcher.py b/RBXLegacyDiscordBot/lib/aiohttp/web_urldispatcher.py new file mode 100644 index 0000000..6625854 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/web_urldispatcher.py @@ -0,0 +1,825 @@ +import abc +import asyncio +import collections +import inspect +import keyword +import os +import re +import sys +import warnings +from collections.abc import Container, Iterable, Sized +from pathlib import Path +from types import MappingProxyType +from urllib.parse import unquote, urlencode + +from . import hdrs +from .abc import AbstractMatchInfo, AbstractRouter, AbstractView +from .file_sender import FileSender +from .protocol import HttpVersion11 +from .web_exceptions import (HTTPExpectationFailed, HTTPForbidden, + HTTPMethodNotAllowed, HTTPNotFound) +from .web_reqrep import Response, StreamResponse + +__all__ = ('UrlDispatcher', 'UrlMappingMatchInfo', + 'AbstractResource', 'Resource', 'PlainResource', 'DynamicResource', + 'ResourceAdapter', + 'AbstractRoute', 'ResourceRoute', + 'Route', 'PlainRoute', 'DynamicRoute', 'StaticRoute', 'View') + + +PY_35 = sys.version_info >= (3, 5) + + +HTTP_METHOD_RE = re.compile(r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$") + + +class AbstractResource(Sized, Iterable): + + def __init__(self, *, name=None): + self._name = name + + @property + def name(self): + return self._name + + @abc.abstractmethod # pragma: no branch + def url(self, **kwargs): + """Construct url for resource with additional params.""" + + @asyncio.coroutine + @abc.abstractmethod # pragma: no branch + def resolve(self, method, path): + """Resolve resource + + Return (UrlMappingMatchInfo, allowed_methods) pair.""" + + @abc.abstractmethod + def get_info(self): + """Return a dict with additional info useful for introspection""" + + @staticmethod + def _append_query(url, query): + if query: + return url + "?" 
+ urlencode(query) + else: + return url + + +class AbstractRoute(abc.ABC): + + def __init__(self, method, handler, *, + expect_handler=None, + resource=None): + + if expect_handler is None: + expect_handler = _defaultExpectHandler + + assert asyncio.iscoroutinefunction(expect_handler), \ + 'Coroutine is expected, got {!r}'.format(expect_handler) + + method = method.upper() + if not HTTP_METHOD_RE.match(method): + raise ValueError("{} is not allowed HTTP method".format(method)) + + assert callable(handler), handler + if asyncio.iscoroutinefunction(handler): + pass + elif inspect.isgeneratorfunction(handler): + warnings.warn("Bare generators are deprecated, " + "use @coroutine wrapper", DeprecationWarning) + elif (isinstance(handler, type) and + issubclass(handler, AbstractView)): + pass + else: + @asyncio.coroutine + def handler_wrapper(*args, **kwargs): + result = old_handler(*args, **kwargs) + if asyncio.iscoroutine(result): + result = yield from result + return result + old_handler = handler + handler = handler_wrapper + + self._method = method + self._handler = handler + self._expect_handler = expect_handler + self._resource = resource + + @property + def method(self): + return self._method + + @property + def handler(self): + return self._handler + + @property + @abc.abstractmethod + def name(self): + """Optional route's name, always equals to resource's name.""" + + @property + def resource(self): + return self._resource + + @abc.abstractmethod + def get_info(self): + """Return a dict with additional info useful for introspection""" + + @abc.abstractmethod # pragma: no branch + def url(self, **kwargs): + """Construct url for route with additional params.""" + + @asyncio.coroutine + def handle_expect_header(self, request): + return (yield from self._expect_handler(request)) + + +class UrlMappingMatchInfo(dict, AbstractMatchInfo): + + def __init__(self, match_dict, route): + super().__init__(match_dict) + self._route = route + + @property + def handler(self): + return self._route.handler + + @property + def route(self): + return self._route + + @property + def expect_handler(self): + return self._route.handle_expect_header + + @property + def http_exception(self): + return None + + def get_info(self): + return self._route.get_info() + + def __repr__(self): + return "".format(super().__repr__(), self._route) + + +class MatchInfoError(UrlMappingMatchInfo): + + def __init__(self, http_exception): + self._exception = http_exception + super().__init__({}, SystemRoute(self._exception)) + + @property + def http_exception(self): + return self._exception + + def __repr__(self): + return "".format(self._exception.status, + self._exception.reason) + + +@asyncio.coroutine +def _defaultExpectHandler(request): + """Default handler for Expect header. + + Just send "100 Continue" to client. 
+    Raise HTTPExpectationFailed if the value of the header is not "100-continue".
+    """
+    expect = request.headers.get(hdrs.EXPECT)
+    if request.version == HttpVersion11:
+        if expect.lower() == "100-continue":
+            request.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+        else:
+            raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
+
+
+class ResourceAdapter(AbstractResource):
+
+    def __init__(self, route):
+        assert isinstance(route, Route), \
+            'Instance of Route class is required, got {!r}'.format(route)
+        super().__init__(name=route.name)
+        self._route = route
+        route._resource = self
+
+    def url(self, **kwargs):
+        return self._route.url(**kwargs)
+
+    @asyncio.coroutine
+    def resolve(self, method, path):
+        route_method = self._route.method
+        allowed_methods = set()
+        match_dict = self._route.match(path)
+        if match_dict is not None:
+            allowed_methods.add(route_method)
+            if route_method == hdrs.METH_ANY or route_method == method:
+                return (UrlMappingMatchInfo(match_dict, self._route),
+                        allowed_methods)
+        return None, allowed_methods
+
+    def get_info(self):
+        return self._route.get_info()
+
+    def __len__(self):
+        return 1
+
+    def __iter__(self):
+        yield self._route
+
+
+class Resource(AbstractResource):
+
+    def __init__(self, *, name=None):
+        super().__init__(name=name)
+        self._routes = []
+
+    def add_route(self, method, handler, *,
+                  expect_handler=None):
+
+        for route in self._routes:
+            if route.method == method or route.method == hdrs.METH_ANY:
+                raise RuntimeError("Added route will never be executed, "
+                                   "method {route.method} is "
+                                   "already registered".format(route=route))
+
+        route = ResourceRoute(method, handler, self,
+                              expect_handler=expect_handler)
+        self.register_route(route)
+        return route
+
+    def register_route(self, route):
+        assert isinstance(route, ResourceRoute), \
+            'Instance of Route class is required, got {!r}'.format(route)
+        self._routes.append(route)
+
+    @asyncio.coroutine
+    def resolve(self, method, path):
+        allowed_methods = set()
+
+        match_dict = self._match(path)
+        if match_dict is None:
+            return None, allowed_methods
+
+        for route in self._routes:
+            route_method = route.method
+            allowed_methods.add(route_method)
+
+            if route_method == method or route_method == hdrs.METH_ANY:
+                return UrlMappingMatchInfo(match_dict, route), allowed_methods
+        else:
+            return None, allowed_methods
+
+    def __len__(self):
+        return len(self._routes)
+
+    def __iter__(self):
+        return iter(self._routes)
+
+
+class PlainResource(Resource):
+
+    def __init__(self, path, *, name=None):
+        super().__init__(name=name)
+        self._path = path
+
+    def _match(self, path):
+        # string comparison is about 10 times faster than regexp matching
+        if self._path == path:
+            return {}
+        else:
+            return None
+
+    def get_info(self):
+        return {'path': self._path}
+
+    def url(self, *, query=None):
+        return self._append_query(self._path, query)
+
+    def __repr__(self):
+        name = "'" + self.name + "' " if self.name is not None else ""
+        return "<PlainResource {name} {path}".format(name=name,
+                                                     path=self._path)
+
+
+class DynamicResource(Resource):
+
+    def __init__(self, pattern, formatter, *, name=None):
+        super().__init__(name=name)
+        self._pattern = pattern
+        self._formatter = formatter
+
+    def _match(self, path):
+        match = self._pattern.match(path)
+        if match is None:
+            return None
+        else:
+            return {key: unquote(value) for key, value in
+                    match.groupdict().items()}
+
+    def get_info(self):
+        return {'formatter': self._formatter,
+                'pattern': self._pattern}
+
+    def url(self, *, parts, query=None):
+        url = self._formatter.format_map(parts)
+        return self._append_query(url, query)
+
+    def __repr__(self):
+        name = "'" + self.name + "' " if self.name is not None else ""
+        return ("<DynamicResource {name} {formatter}"
+                .format(name=name, formatter=self._formatter))
+
+
+class ResourceRoute(AbstractRoute):
+    """A route with resource"""
+
+    def __init__(self, method, handler, resource, *,
+                 expect_handler=None):
+        super().__init__(method, handler, expect_handler=expect_handler,
+                         resource=resource)
+
+    def __repr__(self):
+        return "<ResourceRoute [{method}] {resource} -> {handler!r}".format(
+            method=self.method, resource=self._resource,
+            handler=self.handler)
+
+    @property
+    def name(self):
+        return self._resource.name
+
+    def url(self, **kwargs):
+        """Construct url for route with additional params."""
+        return self._resource.url(**kwargs)
+
+    def get_info(self):
+        return self._resource.get_info()
+
+    _append_query = staticmethod(Resource._append_query)
+
+
+class Route(AbstractRoute):
+    """Old-fashioned route"""
+
+    def __init__(self, method, handler, name, *, expect_handler=None):
+        super().__init__(method, handler,
expect_handler=expect_handler) + self._name = name + + @property + def name(self): + return self._name + + @abc.abstractmethod + def match(self, path): + """Return dict with info for given path or + None if route cannot process path.""" + + _append_query = staticmethod(Resource._append_query) + + +class PlainRoute(Route): + + def __init__(self, method, handler, name, path, *, expect_handler=None): + super().__init__(method, handler, name, expect_handler=expect_handler) + self._path = path + + def match(self, path): + # string comparison is about 10 times faster than regexp matching + if self._path == path: + return {} + else: + return None + + def url(self, *, query=None): + return self._append_query(self._path, query) + + def get_info(self): + return {'path': self._path} + + def __repr__(self): + name = "'" + self.name + "' " if self.name is not None else "" + return " {handler!r}".format( + name=name, method=self.method, path=self._path, + handler=self.handler) + + +class DynamicRoute(Route): + + def __init__(self, method, handler, name, pattern, formatter, *, + expect_handler=None): + super().__init__(method, handler, name, expect_handler=expect_handler) + self._pattern = pattern + self._formatter = formatter + + def match(self, path): + match = self._pattern.match(path) + if match is None: + return None + else: + return match.groupdict() + + def url(self, *, parts, query=None): + url = self._formatter.format_map(parts) + return self._append_query(url, query) + + def get_info(self): + return {'formatter': self._formatter, + 'pattern': self._pattern} + + def __repr__(self): + name = "'" + self.name + "' " if self.name is not None else "" + return (" {handler!r}" + .format(name=name, method=self.method, + formatter=self._formatter, handler=self.handler)) + + +class StaticRoute(Route): + + def __init__(self, name, prefix, directory, *, + expect_handler=None, chunk_size=256*1024, + response_factory=StreamResponse, + show_index=False): + assert prefix.startswith('/'), prefix + assert prefix.endswith('/'), prefix + super().__init__( + 'GET', self.handle, name, expect_handler=expect_handler) + self._prefix = prefix + self._prefix_len = len(self._prefix) + try: + directory = Path(directory) + if str(directory).startswith('~'): + directory = Path(os.path.expanduser(str(directory))) + directory = directory.resolve() + if not directory.is_dir(): + raise ValueError('Not a directory') + except (FileNotFoundError, ValueError) as error: + raise ValueError( + "No directory exists at '{}'".format(directory)) from error + self._directory = directory + self._file_sender = FileSender(resp_factory=response_factory, + chunk_size=chunk_size) + self._show_index = show_index + + def match(self, path): + if not path.startswith(self._prefix): + return None + return {'filename': path[self._prefix_len:]} + + def url(self, *, filename, query=None): + if isinstance(filename, Path): + filename = str(filename) + while filename.startswith('/'): + filename = filename[1:] + url = self._prefix + filename + return self._append_query(url, query) + + def get_info(self): + return {'directory': self._directory, + 'prefix': self._prefix} + + @asyncio.coroutine + def handle(self, request): + filename = unquote(request.match_info['filename']) + try: + filepath = self._directory.joinpath(filename).resolve() + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError) as error: + # relatively safe + raise HTTPNotFound() from error + except Exception as error: + # perm error or other kind! 
+            request.app.logger.exception(error)
+            raise HTTPNotFound() from error
+
+        # on opening a dir, load its contents if allowed
+        if filepath.is_dir():
+            if self._show_index:
+                try:
+                    ret = Response(text=self._directory_as_html(filepath),
+                                   content_type="text/html")
+                except PermissionError:
+                    raise HTTPForbidden()
+            else:
+                raise HTTPForbidden()
+        elif filepath.is_file():
+            ret = yield from self._file_sender.send(request, filepath)
+        else:
+            raise HTTPNotFound
+
+        return ret
+
+    def _directory_as_html(self, filepath):
+        "returns directory's index as html"
+        # sanity check
+        assert filepath.is_dir()
+
+        posix_dir_len = len(self._directory.as_posix())
+
+        # remove the beginning of posix path, so it would be relative
+        # to our added static path
+        relative_path_to_dir = filepath.as_posix()[posix_dir_len:]
+        index_of = "Index of /{}".format(relative_path_to_dir)
+        head = "<head>\n<title>{}</title>\n</head>".format(index_of)
+        h1 = "<h1>{}</h1>".format(index_of)
+
+        index_list = []
+        dir_index = filepath.iterdir()
+        for _file in sorted(dir_index):
+            # show file url as relative to static path
+            file_url = _file.as_posix()[posix_dir_len:]
+
+            # if file is a directory, add '/' to the end of the name
+            if _file.is_dir():
+                file_name = "{}/".format(_file.name)
+            else:
+                file_name = _file.name
+
+            index_list.append(
+                '<li><a href="{url}">{name}</a></li>'.format(url=file_url,
+                                                             name=file_name)
+            )
+        ul = "<ul>\n{}\n</ul>".format('\n'.join(index_list))
    ".format('\n'.join(index_list)) + body = "\n{}\n{}\n".format(h1, ul) + + html = "\n{}\n{}\n".format(head, body) + + return html + + def __repr__(self): + name = "'" + self.name + "' " if self.name is not None else "" + return " {directory!r}".format( + name=name, method=self.method, path=self._prefix, + directory=self._directory) + + +class SystemRoute(Route): + + def __init__(self, http_exception): + super().__init__(hdrs.METH_ANY, self._handler, None) + self._http_exception = http_exception + + def url(self, **kwargs): + raise RuntimeError(".url() is not allowed for SystemRoute") + + def match(self, path): + return None + + def get_info(self): + return {'http_exception': self._http_exception} + + @asyncio.coroutine + def _handler(self, request): + raise self._http_exception + + @property + def status(self): + return self._http_exception.status + + @property + def reason(self): + return self._http_exception.reason + + def __repr__(self): + return "".format(self=self) + + +class View(AbstractView): + + @asyncio.coroutine + def __iter__(self): + if self.request.method not in hdrs.METH_ALL: + self._raise_allowed_methods() + method = getattr(self, self.request.method.lower(), None) + if method is None: + self._raise_allowed_methods() + resp = yield from method() + return resp + + if PY_35: + def __await__(self): + return (yield from self.__iter__()) + + def _raise_allowed_methods(self): + allowed_methods = { + m for m in hdrs.METH_ALL if hasattr(self, m.lower())} + raise HTTPMethodNotAllowed(self.request.method, allowed_methods) + + +class ResourcesView(Sized, Iterable, Container): + + def __init__(self, resources): + self._resources = resources + + def __len__(self): + return len(self._resources) + + def __iter__(self): + yield from self._resources + + def __contains__(self, resource): + return resource in self._resources + + +class RoutesView(Sized, Iterable, Container): + + def __init__(self, resources): + self._routes = [] + for resource in resources: + for route in resource: + self._routes.append(route) + + def __len__(self): + return len(self._routes) + + def __iter__(self): + yield from self._routes + + def __contains__(self, route): + return route in self._routes + + +class UrlDispatcher(AbstractRouter, collections.abc.Mapping): + + DYN = re.compile(r'^\{(?P[a-zA-Z][_a-zA-Z0-9]*)\}$') + DYN_WITH_RE = re.compile( + r'^\{(?P[a-zA-Z][_a-zA-Z0-9]*):(?P.+)\}$') + GOOD = r'[^{}/]+' + ROUTE_RE = re.compile(r'(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})') + NAME_SPLIT_RE = re.compile('[.:-]') + + def __init__(self): + super().__init__() + self._resources = [] + self._named_resources = {} + + @asyncio.coroutine + def resolve(self, request): + path = request.raw_path + method = request.method + allowed_methods = set() + + for resource in self._resources: + match_dict, allowed = yield from resource.resolve(method, path) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + else: + if allowed_methods: + return MatchInfoError(HTTPMethodNotAllowed(method, + allowed_methods)) + else: + return MatchInfoError(HTTPNotFound()) + + def __iter__(self): + return iter(self._named_resources) + + def __len__(self): + return len(self._named_resources) + + def __contains__(self, name): + return name in self._named_resources + + def __getitem__(self, name): + return self._named_resources[name] + + def resources(self): + return ResourcesView(self._resources) + + def routes(self): + return RoutesView(self._resources) + + def named_resources(self): + return 
MappingProxyType(self._named_resources) + + def named_routes(self): + # NB: it's ambiguous but it's really resources. + warnings.warn("Use .named_resources instead", DeprecationWarning) + return self.named_resources() + + def register_route(self, route): + warnings.warn("Use resource-based interface", DeprecationWarning) + resource = ResourceAdapter(route) + self._reg_resource(resource) + + def _reg_resource(self, resource): + assert isinstance(resource, AbstractResource), \ + 'Instance of AbstractResource class is required, got {!r}'.format( + resource) + + name = resource.name + + if name is not None: + parts = self.NAME_SPLIT_RE.split(name) + for part in parts: + if not part.isidentifier() or keyword.iskeyword(part): + raise ValueError('Incorrect route name {!r}, ' + 'the name should be a sequence of ' + 'python identifiers separated ' + 'by dash, dot or column'.format(name)) + if name in self._named_resources: + raise ValueError('Duplicate {!r}, ' + 'already handled by {!r}' + .format(name, self._named_resources[name])) + self._named_resources[name] = resource + self._resources.append(resource) + + def add_resource(self, path, *, name=None): + if not path.startswith('/'): + raise ValueError("path should be started with /") + if not ('{' in path or '}' in path or self.ROUTE_RE.search(path)): + resource = PlainResource(path, name=name) + self._reg_resource(resource) + return resource + + pattern = '' + formatter = '' + for part in self.ROUTE_RE.split(path): + match = self.DYN.match(part) + if match: + pattern += '(?P<{}>{})'.format(match.group('var'), self.GOOD) + formatter += '{' + match.group('var') + '}' + continue + + match = self.DYN_WITH_RE.match(part) + if match: + pattern += '(?P<{var}>{re})'.format(**match.groupdict()) + formatter += '{' + match.group('var') + '}' + continue + + if '{' in part or '}' in part: + raise ValueError("Invalid path '{}'['{}']".format(path, part)) + + formatter += part + pattern += re.escape(part) + + try: + compiled = re.compile('^' + pattern + '$') + except re.error as exc: + raise ValueError( + "Bad pattern '{}': {}".format(pattern, exc)) from None + resource = DynamicResource(compiled, formatter, name=name) + self._reg_resource(resource) + return resource + + def add_route(self, method, path, handler, + *, name=None, expect_handler=None): + resource = self.add_resource(path, name=name) + return resource.add_route(method, handler, + expect_handler=expect_handler) + + def add_static(self, prefix, path, *, name=None, expect_handler=None, + chunk_size=256*1024, response_factory=StreamResponse, + show_index=False): + """Add static files view. 
+ + prefix - url prefix + path - folder with files + + """ + assert prefix.startswith('/') + if not prefix.endswith('/'): + prefix += '/' + route = StaticRoute(name, prefix, path, + expect_handler=expect_handler, + chunk_size=chunk_size, + response_factory=response_factory, + show_index=show_index) + self.register_route(route) + return route + + def add_head(self, *args, **kwargs): + """ + Shortcut for add_route with method HEAD + """ + return self.add_route(hdrs.METH_HEAD, *args, **kwargs) + + def add_get(self, *args, **kwargs): + """ + Shortcut for add_route with method GET + """ + return self.add_route(hdrs.METH_GET, *args, **kwargs) + + def add_post(self, *args, **kwargs): + """ + Shortcut for add_route with method POST + """ + return self.add_route(hdrs.METH_POST, *args, **kwargs) + + def add_put(self, *args, **kwargs): + """ + Shortcut for add_route with method PUT + """ + return self.add_route(hdrs.METH_PUT, *args, **kwargs) + + def add_patch(self, *args, **kwargs): + """ + Shortcut for add_route with method PATCH + """ + return self.add_route(hdrs.METH_PATCH, *args, **kwargs) + + def add_delete(self, *args, **kwargs): + """ + Shortcut for add_route with method DELETE + """ + return self.add_route(hdrs.METH_DELETE, *args, **kwargs) diff --git a/RBXLegacyDiscordBot/lib/aiohttp/web_ws.py b/RBXLegacyDiscordBot/lib/aiohttp/web_ws.py new file mode 100644 index 0000000..8873225 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/web_ws.py @@ -0,0 +1,320 @@ +import asyncio +import json +import sys +import warnings +from collections import namedtuple + +from . import Timeout, hdrs +from ._ws_impl import (CLOSED_MESSAGE, WebSocketError, WSMessage, WSMsgType, + do_handshake) +from .errors import ClientDisconnectedError, HttpProcessingError +from .web_exceptions import (HTTPBadRequest, HTTPInternalServerError, + HTTPMethodNotAllowed) +from .web_reqrep import StreamResponse + +__all__ = ('WebSocketResponse', 'WebSocketReady', 'MsgType', 'WSMsgType',) + +PY_35 = sys.version_info >= (3, 5) +PY_352 = sys.version_info >= (3, 5, 2) + +THRESHOLD_CONNLOST_ACCESS = 5 + + +# deprecated since 1.0 +MsgType = WSMsgType + + +class WebSocketReady(namedtuple('WebSocketReady', 'ok protocol')): + def __bool__(self): + return self.ok + + +class WebSocketResponse(StreamResponse): + + def __init__(self, *, + timeout=10.0, autoclose=True, autoping=True, protocols=()): + super().__init__(status=101) + self._protocols = protocols + self._protocol = None + self._writer = None + self._reader = None + self._closed = False + self._closing = False + self._conn_lost = 0 + self._close_code = None + self._loop = None + self._waiting = False + self._exception = None + self._timeout = timeout + self._autoclose = autoclose + self._autoping = autoping + + @asyncio.coroutine + def prepare(self, request): + # make pre-check to don't hide it by do_handshake() exceptions + resp_impl = self._start_pre_check(request) + if resp_impl is not None: + return resp_impl + + parser, protocol, writer = self._pre_start(request) + resp_impl = yield from super().prepare(request) + self._post_start(request, parser, protocol, writer) + return resp_impl + + def _pre_start(self, request): + try: + status, headers, parser, writer, protocol = do_handshake( + request.method, request.headers, request.transport, + self._protocols) + except HttpProcessingError as err: + if err.code == 405: + raise HTTPMethodNotAllowed( + request.method, [hdrs.METH_GET], body=b'') + elif err.code == 400: + raise HTTPBadRequest(text=err.message, headers=err.headers) + else: 
# pragma: no cover + raise HTTPInternalServerError() from err + + if self.status != status: + self.set_status(status) + for k, v in headers: + self.headers[k] = v + self.force_close() + return parser, protocol, writer + + def _post_start(self, request, parser, protocol, writer): + self._reader = request._reader.set_parser(parser) + self._writer = writer + self._protocol = protocol + self._loop = request.app.loop + + def start(self, request): + warnings.warn('use .prepare(request) instead', DeprecationWarning) + # make pre-check to don't hide it by do_handshake() exceptions + resp_impl = self._start_pre_check(request) + if resp_impl is not None: + return resp_impl + + parser, protocol, writer = self._pre_start(request) + resp_impl = super().start(request) + self._post_start(request, parser, protocol, writer) + return resp_impl + + def can_prepare(self, request): + if self._writer is not None: + raise RuntimeError('Already started') + try: + _, _, _, _, protocol = do_handshake( + request.method, request.headers, request.transport, + self._protocols) + except HttpProcessingError: + return WebSocketReady(False, None) + else: + return WebSocketReady(True, protocol) + + def can_start(self, request): + warnings.warn('use .can_prepare(request) instead', DeprecationWarning) + return self.can_prepare(request) + + @property + def closed(self): + return self._closed + + @property + def close_code(self): + return self._close_code + + @property + def protocol(self): + return self._protocol + + def exception(self): + return self._exception + + def ping(self, message='b'): + if self._writer is None: + raise RuntimeError('Call .prepare() first') + if self._closed: + raise RuntimeError('websocket connection is closing') + self._writer.ping(message) + + def pong(self, message='b'): + # unsolicited pong + if self._writer is None: + raise RuntimeError('Call .prepare() first') + if self._closed: + raise RuntimeError('websocket connection is closing') + self._writer.pong(message) + + def send_str(self, data): + if self._writer is None: + raise RuntimeError('Call .prepare() first') + if self._closed: + raise RuntimeError('websocket connection is closing') + if not isinstance(data, str): + raise TypeError('data argument must be str (%r)' % type(data)) + self._writer.send(data, binary=False) + + def send_bytes(self, data): + if self._writer is None: + raise RuntimeError('Call .prepare() first') + if self._closed: + raise RuntimeError('websocket connection is closing') + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError('data argument must be byte-ish (%r)' % + type(data)) + self._writer.send(data, binary=True) + + def send_json(self, data, *, dumps=json.dumps): + self.send_str(dumps(data)) + + @asyncio.coroutine + def write_eof(self): + if self._eof_sent: + return + if self._resp_impl is None: + raise RuntimeError("Response has not been started") + + yield from self.close() + self._eof_sent = True + + @asyncio.coroutine + def close(self, *, code=1000, message=b''): + if self._writer is None: + raise RuntimeError('Call .prepare() first') + + if not self._closed: + self._closed = True + try: + self._writer.close(code, message) + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = 1006 + raise + except Exception as exc: + self._close_code = 1006 + self._exception = exc + return True + + if self._closing: + return True + + begin = self._loop.time() + while self._loop.time() - begin < self._timeout: + try: + with Timeout(timeout=self._timeout, + loop=self._loop): + msg = 
yield from self._reader.read() + except asyncio.CancelledError: + self._close_code = 1006 + raise + except Exception as exc: + self._close_code = 1006 + self._exception = exc + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + return True + + self._close_code = 1006 + self._exception = asyncio.TimeoutError() + return True + else: + return False + + @asyncio.coroutine + def receive(self): + if self._reader is None: + raise RuntimeError('Call .prepare() first') + if self._waiting: + raise RuntimeError('Concurrent call to receive() is not allowed') + + self._waiting = True + try: + while True: + if self._closed: + self._conn_lost += 1 + if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: + raise RuntimeError('WebSocket connection is closed.') + return CLOSED_MESSAGE + + try: + msg = yield from self._reader.read() + except (asyncio.CancelledError, asyncio.TimeoutError): + raise + except WebSocketError as exc: + self._close_code = exc.code + yield from self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except ClientDisconnectedError: + self._closed = True + self._close_code = 1006 + return WSMessage(WSMsgType.CLOSE, None, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = 1006 + yield from self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + yield from self.close() + return msg + if msg.type == WSMsgType.PING and self._autoping: + self.pong(msg.data) + elif msg.type == WSMsgType.PONG and self._autoping: + continue + else: + return msg + finally: + self._waiting = False + + @asyncio.coroutine + def receive_msg(self): + warnings.warn( + 'receive_msg() coroutine is deprecated. 
use receive() instead', + DeprecationWarning) + return (yield from self.receive()) + + @asyncio.coroutine + def receive_str(self): + msg = yield from self.receive() + if msg.type != WSMsgType.TEXT: + raise TypeError( + "Received message {}:{!r} is not str".format(msg.type, + msg.data)) + return msg.data + + @asyncio.coroutine + def receive_bytes(self): + msg = yield from self.receive() + if msg.type != WSMsgType.BINARY: + raise TypeError( + "Received message {}:{!r} is not bytes".format(msg.type, + msg.data)) + return msg.data + + @asyncio.coroutine + def receive_json(self, *, loads=json.loads): + data = yield from self.receive_str() + return loads(data) + + def write(self, data): + raise RuntimeError("Cannot call .write() for websocket") + + if PY_35: + def __aiter__(self): + return self + + if not PY_352: # pragma: no cover + __aiter__ = asyncio.coroutine(__aiter__) + + @asyncio.coroutine + def __anext__(self): + msg = yield from self.receive() + if msg.type == WSMsgType.CLOSE: + raise StopAsyncIteration # NOQA + return msg diff --git a/RBXLegacyDiscordBot/lib/aiohttp/worker.py b/RBXLegacyDiscordBot/lib/aiohttp/worker.py new file mode 100644 index 0000000..9d079cc --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/worker.py @@ -0,0 +1,195 @@ +"""Async gunicorn worker for aiohttp.web""" + +import asyncio +import os +import re +import signal +import ssl +import sys + +import gunicorn.workers.base as base +from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat + +from aiohttp.helpers import AccessLogger, ensure_future + +__all__ = ('GunicornWebWorker', 'GunicornUVLoopWebWorker') + + +class GunicornWebWorker(base.Worker): + + DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT + DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default + + def __init__(self, *args, **kw): # pragma: no cover + super().__init__(*args, **kw) + + self.servers = {} + self.exit_code = 0 + + def init_process(self): + # create new event_loop after fork + asyncio.get_event_loop().close() + + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + super().init_process() + + def run(self): + self.loop.run_until_complete(self.wsgi.startup()) + self._runner = ensure_future(self._run(), loop=self.loop) + + try: + self.loop.run_until_complete(self._runner) + finally: + self.loop.close() + + sys.exit(self.exit_code) + + def make_handler(self, app): + return app.make_handler( + logger=self.log, + slow_request_timeout=self.cfg.timeout, + keepalive_timeout=self.cfg.keepalive, + access_log=self.log.access_log, + access_log_format=self._get_valid_log_format( + self.cfg.access_log_format)) + + @asyncio.coroutine + def close(self): + if self.servers: + servers = self.servers + self.servers = None + + # stop accepting connections + for server, handler in servers.items(): + self.log.info("Stopping server: %s, connections: %s", + self.pid, len(handler.connections)) + server.close() + yield from server.wait_closed() + + # send on_shutdown event + yield from self.wsgi.shutdown() + + # stop alive connections + tasks = [ + handler.finish_connections( + timeout=self.cfg.graceful_timeout / 100 * 95) + for handler in servers.values()] + yield from asyncio.gather(*tasks, loop=self.loop) + + # cleanup application + yield from self.wsgi.cleanup() + + @asyncio.coroutine + def _run(self): + + ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None + + for sock in self.sockets: + handler = self.make_handler(self.wsgi) + srv = yield from self.loop.create_server(handler, sock=sock.sock, + 
ssl=ctx) + self.servers[srv] = handler + + # If our parent changed then we shut down. + pid = os.getpid() + try: + while self.alive: + self.notify() + + cnt = sum(handler.requests_count + for handler in self.servers.values()) + if self.cfg.max_requests and cnt > self.cfg.max_requests: + self.alive = False + self.log.info("Max requests, shutting down: %s", self) + + elif pid == os.getpid() and self.ppid != os.getppid(): + self.alive = False + self.log.info("Parent changed, shutting down: %s", self) + else: + yield from asyncio.sleep(1.0, loop=self.loop) + + except BaseException: + pass + + yield from self.close() + + def init_signals(self): + # Set up signals through the event loop API. + + self.loop.add_signal_handler(signal.SIGQUIT, self.handle_quit, + signal.SIGQUIT, None) + + self.loop.add_signal_handler(signal.SIGTERM, self.handle_exit, + signal.SIGTERM, None) + + self.loop.add_signal_handler(signal.SIGINT, self.handle_quit, + signal.SIGINT, None) + + self.loop.add_signal_handler(signal.SIGWINCH, self.handle_winch, + signal.SIGWINCH, None) + + self.loop.add_signal_handler(signal.SIGUSR1, self.handle_usr1, + signal.SIGUSR1, None) + + self.loop.add_signal_handler(signal.SIGABRT, self.handle_abort, + signal.SIGABRT, None) + + # Don't let SIGTERM and SIGUSR1 disturb active requests + # by interrupting system calls + signal.siginterrupt(signal.SIGTERM, False) + signal.siginterrupt(signal.SIGUSR1, False) + + def handle_quit(self, sig, frame): + self.alive = False + + def handle_abort(self, sig, frame): + self.alive = False + self.exit_code = 1 + + @staticmethod + def _create_ssl_context(cfg): + """ Creates SSLContext instance for usage in asyncio.create_server. + + See ssl.SSLSocket.__init__ for more details. + """ + ctx = ssl.SSLContext(cfg.ssl_version) + ctx.load_cert_chain(cfg.certfile, cfg.keyfile) + ctx.verify_mode = cfg.cert_reqs + if cfg.ca_certs: + ctx.load_verify_locations(cfg.ca_certs) + if cfg.ciphers: + ctx.set_ciphers(cfg.ciphers) + return ctx + + def _get_valid_log_format(self, source_format): + if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: + return self.DEFAULT_AIOHTTP_LOG_FORMAT + elif re.search(r'%\([^\)]+\)', source_format): + raise ValueError( + "Gunicorn's style options in form of `%(name)s` are not " + "supported for the log formatting. Please use aiohttp's " + "format specification to configure access log formatting: " + "http://aiohttp.readthedocs.io/en/stable/logging.html" + "#format-specification" + ) + else: + return source_format + + +class GunicornUVLoopWebWorker(GunicornWebWorker): + + def init_process(self): + import uvloop + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup uvloop policy, so that every + # asyncio.get_event_loop() will create an instance + # of uvloop event loop. + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + + super().init_process() diff --git a/RBXLegacyDiscordBot/lib/aiohttp/wsgi.py b/RBXLegacyDiscordBot/lib/aiohttp/wsgi.py new file mode 100644 index 0000000..251c044 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/aiohttp/wsgi.py @@ -0,0 +1,235 @@ +"""wsgi server. 
+ +TODO: + * proxy protocol + * x-forward security + * wsgi file support (os.sendfile) +""" + +import asyncio +import inspect +import io +import os +import socket +import sys +from urllib.parse import urlsplit + +import aiohttp +from aiohttp import hdrs, server + +__all__ = ('WSGIServerHttpProtocol',) + + +class WSGIServerHttpProtocol(server.ServerHttpProtocol): + """HTTP Server that implements the Python WSGI protocol. + + It uses 'wsgi.async' of 'True'. 'wsgi.input' can behave differently + depends on 'readpayload' constructor parameter. If readpayload is set to + True, wsgi server reads all incoming data into BytesIO object and + sends it as 'wsgi.input' environ var. If readpayload is set to false + 'wsgi.input' is a StreamReader and application should read incoming + data with "yield from environ['wsgi.input'].read()". It defaults to False. + """ + + SCRIPT_NAME = os.environ.get('SCRIPT_NAME', '') + + def __init__(self, app, readpayload=False, is_ssl=False, *args, **kw): + super().__init__(*args, **kw) + + self.wsgi = app + self.is_ssl = is_ssl + self.readpayload = readpayload + + def create_wsgi_response(self, message): + return WsgiResponse(self.writer, message) + + def create_wsgi_environ(self, message, payload): + uri_parts = urlsplit(message.path) + + environ = { + 'wsgi.input': payload, + 'wsgi.errors': sys.stderr, + 'wsgi.version': (1, 0), + 'wsgi.async': True, + 'wsgi.multithread': False, + 'wsgi.multiprocess': False, + 'wsgi.run_once': False, + 'wsgi.file_wrapper': FileWrapper, + 'SERVER_SOFTWARE': aiohttp.HttpMessage.SERVER_SOFTWARE, + 'REQUEST_METHOD': message.method, + 'QUERY_STRING': uri_parts.query or '', + 'RAW_URI': message.path, + 'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version + } + + script_name = self.SCRIPT_NAME + + for hdr_name, hdr_value in message.headers.items(): + hdr_name = hdr_name.upper() + if hdr_name == 'SCRIPT_NAME': + script_name = hdr_value + elif hdr_name == 'CONTENT-TYPE': + environ['CONTENT_TYPE'] = hdr_value + continue + elif hdr_name == 'CONTENT-LENGTH': + environ['CONTENT_LENGTH'] = hdr_value + continue + + key = 'HTTP_%s' % hdr_name.replace('-', '_') + if key in environ: + hdr_value = '%s,%s' % (environ[key], hdr_value) + + environ[key] = hdr_value + + url_scheme = environ.get('HTTP_X_FORWARDED_PROTO') + if url_scheme is None: + url_scheme = 'https' if self.is_ssl else 'http' + environ['wsgi.url_scheme'] = url_scheme + + # authors should be aware that REMOTE_HOST and REMOTE_ADDR + # may not qualify the remote addr + # also SERVER_PORT variable MUST be set to the TCP/IP port number on + # which this request is received from the client. + # http://www.ietf.org/rfc/rfc3875 + + family = self.transport.get_extra_info('socket').family + if family in (socket.AF_INET, socket.AF_INET6): + peername = self.transport.get_extra_info('peername') + environ['REMOTE_ADDR'] = peername[0] + environ['REMOTE_PORT'] = str(peername[1]) + http_host = message.headers.get("HOST", None) + if http_host: + hostport = http_host.split(":") + environ['SERVER_NAME'] = hostport[0] + if len(hostport) > 1: + environ['SERVER_PORT'] = str(hostport[1]) + else: + environ['SERVER_PORT'] = '80' + else: + # SERVER_NAME should be set to value of Host header, but this + # header is not required. 
In this case we shoud set it to local + # address of socket + sockname = self.transport.get_extra_info('sockname') + environ['SERVER_NAME'] = sockname[0] + environ['SERVER_PORT'] = str(sockname[1]) + else: + # We are behind reverse proxy, so get all vars from headers + for header in ('REMOTE_ADDR', 'REMOTE_PORT', + 'SERVER_NAME', 'SERVER_PORT'): + environ[header] = message.headers.get(header, '') + + path_info = uri_parts.path + if script_name: + path_info = path_info.split(script_name, 1)[-1] + + environ['PATH_INFO'] = path_info + environ['SCRIPT_NAME'] = script_name + + environ['async.reader'] = self.reader + environ['async.writer'] = self.writer + + return environ + + @asyncio.coroutine + def handle_request(self, message, payload): + """Handle a single HTTP request""" + now = self._loop.time() + + if self.readpayload: + wsgiinput = io.BytesIO() + wsgiinput.write((yield from payload.read())) + wsgiinput.seek(0) + payload = wsgiinput + + environ = self.create_wsgi_environ(message, payload) + response = self.create_wsgi_response(message) + + riter = self.wsgi(environ, response.start_response) + if isinstance(riter, asyncio.Future) or inspect.isgenerator(riter): + riter = yield from riter + + resp = response.response + try: + for item in riter: + if isinstance(item, asyncio.Future): + item = yield from item + yield from resp.write(item) + + yield from resp.write_eof() + finally: + if hasattr(riter, 'close'): + riter.close() + + if resp.keep_alive(): + self.keep_alive(True) + + self.log_access( + message, environ, response.response, self._loop.time() - now) + + +class FileWrapper: + """Custom file wrapper.""" + + def __init__(self, fobj, chunk_size=8192): + self.fobj = fobj + self.chunk_size = chunk_size + if hasattr(fobj, 'close'): + self.close = fobj.close + + def __iter__(self): + return self + + def __next__(self): + data = self.fobj.read(self.chunk_size) + if data: + return data + raise StopIteration + + +class WsgiResponse: + """Implementation of start_response() callable as specified by PEP 3333""" + + status = None + + HOP_HEADERS = { + hdrs.CONNECTION, + hdrs.KEEP_ALIVE, + hdrs.PROXY_AUTHENTICATE, + hdrs.PROXY_AUTHORIZATION, + hdrs.TE, + hdrs.TRAILER, + hdrs.TRANSFER_ENCODING, + hdrs.UPGRADE, + } + + def __init__(self, writer, message): + self.writer = writer + self.message = message + + def start_response(self, status, headers, exc_info=None): + if exc_info: + try: + if self.status: + raise exc_info[1] + finally: + exc_info = None + + status_code = int(status.split(' ', 1)[0]) + + self.status = status + resp = self.response = aiohttp.Response( + self.writer, status_code, + self.message.version, self.message.should_close) + resp.HOP_HEADERS = self.HOP_HEADERS + for name, value in headers: + resp.add_header(name, value) + + if resp.has_chunked_hdr: + resp.enable_chunked_encoding() + + # send headers immediately for websocket connection + if status_code == 101 and resp.upgrade and resp.websocket: + resp.send_headers() + else: + resp._send_headers = True + return self.response.write diff --git a/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/DESCRIPTION.rst b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..b601105 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,76 @@ +async-timeout +============= + +asyncio-compatible timeout context manager. 
+ + +Usage example +------------- + + +The context manager is useful in cases when you want to apply timeout +logic around block of code or in cases when ``asyncio.wait_for()`` is +not suitable. Also it's much faster than ``asyncio.wait_for()`` +because ``timeout`` doesn't create a new task. + +The ``timeout(timeout, *, loop=None)`` call returns a context manager +that cancels a block on *timeout* expiring:: + + with timeout(1.5): + yield from inner() + +1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing + happens. +2. Otherwise ``inner()`` is cancelled internally by sending + ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is + raised outside of context manager scope. + +*timeout* parameter could be ``None`` for skipping timeout functionality. + +Installation +------------ + +:: + + $ pip install async-timeout + +The library is Python 3 only! + + + +Authors and License +------------------- + +The module is written by Andrew Svetlov. + +It's *Apache 2* licensed and freely available. + + +CHANGES +======= + +1.2.1 (2017-05-02) +------------------ + +* Support unpublished event loop's "current_task" api. + + +1.2.0 (2017-03-11) +------------------ + +* Extra check on context manager exit + +* 0 is no-op timeout + + +1.1.0 (2016-10-20) +------------------ + +* Rename to `async-timeout` + +1.0.0 (2016-09-09) +------------------ + +* The first release. + + diff --git a/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/INSTALLER b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/METADATA b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/METADATA new file mode 100644 index 0000000..4809d86 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/METADATA @@ -0,0 +1,95 @@ +Metadata-Version: 2.0 +Name: async-timeout +Version: 1.2.1 +Summary: Timeout context manager for asyncio programs +Home-page: https://github.com/aio-libs/async_timeout/ +Author: Andrew Svetlov +Author-email: andrew.svetlov@gmail.com +License: Apache 2 +Platform: UNKNOWN +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AsyncIO + +async-timeout +============= + +asyncio-compatible timeout context manager. + + +Usage example +------------- + + +The context manager is useful in cases when you want to apply timeout +logic around block of code or in cases when ``asyncio.wait_for()`` is +not suitable. Also it's much faster than ``asyncio.wait_for()`` +because ``timeout`` doesn't create a new task. + +The ``timeout(timeout, *, loop=None)`` call returns a context manager +that cancels a block on *timeout* expiring:: + + with timeout(1.5): + yield from inner() + +1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing + happens. +2. Otherwise ``inner()`` is cancelled internally by sending + ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is + raised outside of context manager scope. + +*timeout* parameter could be ``None`` for skipping timeout functionality. 
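As a quick illustration of the usage described above, here is a minimal sketch of the same pattern in a self-contained script. It is not part of the vendored files; the coroutine name ``fetch_with_deadline`` and the ``asyncio.sleep`` stand-in are illustrative only, and it assumes the ``with timeout(...)`` form shipped in this bundled async_timeout 1.2.1, which must run inside a task::

    import asyncio
    from async_timeout import timeout

    async def fetch_with_deadline():
        # Give the inner operation at most half a second, then give up.
        try:
            with timeout(0.5):               # must be used inside a running task
                await asyncio.sleep(10)      # stand-in for a slow network call
        except asyncio.TimeoutError:
            return 'timed out'
        return 'finished'

    loop = asyncio.get_event_loop()
    print(loop.run_until_complete(fetch_with_deadline()))  # -> 'timed out'

Because the timer only schedules a cancellation of the current task (rather than wrapping the coroutine in a new one, as ``asyncio.wait_for()`` does), the inner ``asyncio.CancelledError`` surfaces outside the block as ``asyncio.TimeoutError``, which the sketch catches.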
+ +Installation +------------ + +:: + + $ pip install async-timeout + +The library is Python 3 only! + + + +Authors and License +------------------- + +The module is written by Andrew Svetlov. + +It's *Apache 2* licensed and freely available. + + +CHANGES +======= + +1.2.1 (2017-05-02) +------------------ + +* Support unpublished event loop's "current_task" api. + + +1.2.0 (2017-03-11) +------------------ + +* Extra check on context manager exit + +* 0 is no-op timeout + + +1.1.0 (2016-10-20) +------------------ + +* Rename to `async-timeout` + +1.0.0 (2016-09-09) +------------------ + +* The first release. + + diff --git a/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/RECORD b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/RECORD new file mode 100644 index 0000000..c4d4c00 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/RECORD @@ -0,0 +1,9 @@ +async_timeout/__init__.py,sha256=5ONrYCJMKAzWV1qcK-qhUkRKPGEQfEzCWoxVvwAAru4,1913 +async_timeout-1.2.1.dist-info/DESCRIPTION.rst,sha256=IQuZGR3YfIcIGhWshP8gce8HXNCvMhxA-ov9oroqnI8,1430 +async_timeout-1.2.1.dist-info/METADATA,sha256=cOJx0VKD1jtlzkp0JAlBu4nzZ5cRX35I8PCQW4CG5bE,2117 +async_timeout-1.2.1.dist-info/RECORD,, +async_timeout-1.2.1.dist-info/WHEEL,sha256=rNo05PbNqwnXiIHFsYm0m22u4Zm6YJtugFG2THx4w3g,92 +async_timeout-1.2.1.dist-info/metadata.json,sha256=FwV6Nc2u0faHG1tFFHTGglKZida7muCQA-9_jH0qq5E,889 +async_timeout-1.2.1.dist-info/top_level.txt,sha256=9oM4e7Twq8iD_7_Q3Mz0E6GPIB6vJvRFo-UBwUQtBDU,14 +async_timeout-1.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +async_timeout/__pycache__/__init__.cpython-36.pyc,, diff --git a/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/WHEEL b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/WHEEL new file mode 100644 index 0000000..bb7f7db --- /dev/null +++ b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/top_level.txt b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/top_level.txt new file mode 100644 index 0000000..ad29955 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/async_timeout-1.2.1.dist-info/top_level.txt @@ -0,0 +1 @@ +async_timeout diff --git a/RBXLegacyDiscordBot/lib/async_timeout/__init__.py b/RBXLegacyDiscordBot/lib/async_timeout/__init__.py new file mode 100644 index 0000000..ee0817a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/async_timeout/__init__.py @@ -0,0 +1,62 @@ +import asyncio + + +__version__ = '1.2.1' + + +class timeout: + """timeout context manager. + + Useful in cases when you want to apply timeout logic around block + of code or in cases when asyncio.wait_for is not suitable. For example: + + >>> with timeout(0.001): + ... async with aiohttp.get('https://github.com') as r: + ... 
await r.text() + + + timeout - value in seconds or None to disable timeout logic + loop - asyncio compatible event loop + """ + def __init__(self, timeout, *, loop=None): + if timeout is not None and timeout == 0: + timeout = None + self._timeout = timeout + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._task = None + self._cancelled = False + self._cancel_handler = None + + def __enter__(self): + if self._timeout is not None: + self._task = current_task(self._loop) + if self._task is None: + raise RuntimeError('Timeout context manager should be used ' + 'inside a task') + self._cancel_handler = self._loop.call_later( + self._timeout, self._cancel_task) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is asyncio.CancelledError and self._cancelled: + self._cancel_handler = None + self._task = None + raise asyncio.TimeoutError from None + if self._timeout is not None and self._cancel_handler is not None: + self._cancel_handler.cancel() + self._cancel_handler = None + self._task = None + + def _cancel_task(self): + self._cancelled = self._task.cancel() + + +def current_task(loop): + task = asyncio.Task.current_task(loop=loop) + if task is None: + if hasattr(loop, 'current_task'): + task = loop.current_task() + + return task diff --git a/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/DESCRIPTION.rst b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..30e2362 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/DESCRIPTION.rst @@ -0,0 +1,49 @@ +Certifi: Python SSL Certificates +================================ + +`Certifi`_ is a carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +Unfortunately, old versions of OpenSSL (less than 1.0.2) sometimes fail to +validate certificate chains that use the strong roots. For this reason, if you +fail to validate a certificate using the ``certifi.where()`` mechanism, you can +intentionally re-add the 1024-bit roots back into your bundle by calling +``certifi.old_where()`` instead. This is not recommended in production: if at +all possible you should upgrade to a newer OpenSSL. However, if you have no +other option, this may work for you. + +.. _`Certifi`: http://certifi.io/en/latest/ +.. 
_`Requests`: http://docs.python-requests.org/en/latest/ + + diff --git a/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/INSTALLER b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/METADATA b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/METADATA new file mode 100644 index 0000000..f5dbb47 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/METADATA @@ -0,0 +1,68 @@ +Metadata-Version: 2.0 +Name: certifi +Version: 2017.4.17 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: http://certifi.io/ +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: ISC +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 + +Certifi: Python SSL Certificates +================================ + +`Certifi`_ is a carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +Unfortunately, old versions of OpenSSL (less than 1.0.2) sometimes fail to +validate certificate chains that use the strong roots. For this reason, if you +fail to validate a certificate using the ``certifi.where()`` mechanism, you can +intentionally re-add the 1024-bit roots back into your bundle by calling +``certifi.old_where()`` instead. This is not recommended in production: if at +all possible you should upgrade to a newer OpenSSL. However, if you have no +other option, this may work for you. + +.. _`Certifi`: http://certifi.io/en/latest/ +.. 
_`Requests`: http://docs.python-requests.org/en/latest/ + + diff --git a/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/RECORD b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/RECORD new file mode 100644 index 0000000..138499c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/RECORD @@ -0,0 +1,16 @@ +certifi/__init__.py,sha256=fygqpMx6KPrCIRMY4qcO5Zo60MDyQCYtNaI5BbgZyqE,63 +certifi/__main__.py,sha256=FiOYt1Fltst7wk9DRa6GCoBr8qBUxlNQu_MKJf04E6s,41 +certifi/cacert.pem,sha256=UgTuBXP5FC1mKK2skamUrJKyL8RVMmtUTKJZpTSFn_U,321422 +certifi/core.py,sha256=DqvIINYNNXsp3Srlk_NRaiizaww8po3l8t8ksz-Xt6Q,716 +certifi/old_root.pem,sha256=HT0KIfaM83q0XHFqGEesiGyfmlSWuD2RI0-AVIS2srY,25626 +certifi/weak.pem,sha256=LGe1E3ewgvNAs_yRA9ZKBN6C5KV2Cx34iJFMPi8_hyo,347048 +certifi-2017.4.17.dist-info/DESCRIPTION.rst,sha256=wVWYoH3eovdWFPZnYU2NT4itGRx3eN5C_s1IuNm4qF4,1731 +certifi-2017.4.17.dist-info/METADATA,sha256=ZzDLL1LWFj2SDbZdV_QZ-ZNWd_UDw_NXGbARLwgLYdg,2396 +certifi-2017.4.17.dist-info/RECORD,, +certifi-2017.4.17.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113 +certifi-2017.4.17.dist-info/metadata.json,sha256=8MYPZlDmqjogcs0bl7CuyQwwrSvqaFjpGMYsjFBQBlw,790 +certifi-2017.4.17.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi-2017.4.17.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi/__pycache__/core.cpython-36.pyc,, +certifi/__pycache__/__init__.cpython-36.pyc,, +certifi/__pycache__/__main__.cpython-36.pyc,, diff --git a/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/WHEEL b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/WHEEL new file mode 100644 index 0000000..7bf9daa --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0.a0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/top_level.txt b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi-2017.4.17.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/RBXLegacyDiscordBot/lib/certifi/__init__.py b/RBXLegacyDiscordBot/lib/certifi/__init__.py new file mode 100644 index 0000000..b8cd289 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi/__init__.py @@ -0,0 +1,3 @@ +from .core import where, old_where + +__version__ = "2017.04.17" diff --git a/RBXLegacyDiscordBot/lib/certifi/__main__.py b/RBXLegacyDiscordBot/lib/certifi/__main__.py new file mode 100644 index 0000000..5f1da0d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi/__main__.py @@ -0,0 +1,2 @@ +from certifi import where +print(where()) diff --git a/RBXLegacyDiscordBot/lib/certifi/cacert.pem b/RBXLegacyDiscordBot/lib/certifi/cacert.pem new file mode 100644 index 0000000..e5f0896 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi/cacert.pem @@ -0,0 +1,5246 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- 
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Low-Value Services Root" +# Serial: 1 +# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc +# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d +# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 +-----BEGIN CERTIFICATE----- +MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw +MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD +VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA +A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul +CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n +tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl +dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch +PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC ++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O +BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl +MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk +ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB +IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X +7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz +43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY +eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl +pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA +WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 +MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt +H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD 
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Public Services Root" +# Serial: 1 +# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f +# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 +# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx +MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB +ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV +BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV +6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX +GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP +dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH +1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF +62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW +BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL +MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU +cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv +b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 +IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ +iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao +GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh +4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm +XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Qualified Certificates Root" +# Serial: 1 +# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb +# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf +# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 +-----BEGIN CERTIFICATE----- +MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 +MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK +EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh +BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B 
+AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq +xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G +87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i +2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U +WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 +0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G +A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr +pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL +ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm +aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv +hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm +hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X +dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 +P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y +iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no +xqE= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Global CA 2" +# Serial: 1 +# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 +# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d +# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 +-----BEGIN CERTIFICATE----- +MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs +IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg +R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A +PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 +Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL +TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL +5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 +S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe +2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap +EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td +EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv +/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN +A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 +abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF +I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz +4iIprn2DQKi6bA== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. 
+# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN +Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW +koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. 
+# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Label: "Visa eCommerce Root" +# Serial: 25952180776285836048024890241505565794 +# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 +# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 +# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 +-----BEGIN CERTIFICATE----- +MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr +MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl +cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv +bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw +CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h +dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l +cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h +2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E +lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV +ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq 
+299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t +vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL +dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF +AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR +zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 +LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd +7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw +++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt +398znM/jra6O1I7mT1GvFpLgXPYHDw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum CA O=Unizeto Sp. z o.o. +# Subject: CN=Certum CA O=Unizeto Sp. z o.o. +# Label: "Certum Root CA" +# Serial: 65568 +# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9 +# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18 +# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24 +-----BEGIN CERTIFICATE----- +MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM +MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD +QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM +MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD +QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E +jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo +ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI +ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu +Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg +AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7 +HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA +uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa +TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg +xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q +CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x +O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs +6GAqm4VKQPNriiTsBhYscw== +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU 
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=Secure Certificate Services O=Comodo CA Limited +# Subject: CN=Secure Certificate Services O=Comodo CA Limited +# Label: "Comodo Secure Services root" +# Serial: 1 +# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd +# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 +# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 +-----BEGIN CERTIFICATE----- +MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp +ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow +fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV +BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM +cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S +HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 +CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk +3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz +6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV +HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud +EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv +Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw +Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww +DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 +5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj +Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI +gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ +aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl +izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= +-----END CERTIFICATE----- + +# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited +# Subject: CN=Trusted Certificate Services O=Comodo CA Limited +# Label: "Comodo Trusted Services root" +# Serial: 1 +# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 +# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd +# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 +aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla +MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO +BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD 
+VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW +fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt +TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL +fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW +1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 +kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G +A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v +ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo +dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu +Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ +HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 +pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS +jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ +xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn +dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Label: "QuoVadis Root CA" +# Serial: 985026699 +# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 +# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 +# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz +MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw +IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR +dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp +li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D +rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ +WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug +F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU +xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC +Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv +dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw +ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl +IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh +c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy +ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI +KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T +KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq +y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p +dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD +VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk +fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 +7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R 
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y +mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW +xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK +SnQ2+Q== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM 
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=Sonera Class2 CA O=Sonera +# Subject: CN=Sonera Class2 CA O=Sonera +# Label: "Sonera Class 2 Root CA" +# Serial: 29 +# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb +# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 +# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP +MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx +MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV +BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o +Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt +5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s +3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej +vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu +8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw +DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG +MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil +zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ +3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD +FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 +Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 +ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M +-----END CERTIFICATE----- + +# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com +# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com +# Label: "UTN USERFirst Hardware Root CA" +# Serial: 91374294542884704022267039221184531197 +# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 +# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 +# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 +-----BEGIN CERTIFICATE----- +MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB +lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt +SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG +A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe +MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v +d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh +cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn +0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ +M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a +MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd +oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI +DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy +oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD +VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 +dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy +bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF 
+BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM +//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli +CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE +CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t +3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS +KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org +# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org +# Label: "Camerfirma Chambers of Commerce Root" +# Serial: 0 +# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84 +# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1 +# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3 +-----BEGIN CERTIFICATE----- +MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn +MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL +ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg +b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa +MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB +ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw +IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B +AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb +unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d +BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq +7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3 +0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX +roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG +A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j +aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p +26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA +BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud +EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN +BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz +aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB +AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd +p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi +1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc +XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0 +eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu +tGWaIZDgqtCYvDi1czyL+Nw= +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org +# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org +# Label: "Camerfirma Global Chambersign Root" +# Serial: 0 +# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19 +# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9 +# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed +-----BEGIN CERTIFICATE----- +MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn +MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL +ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo +YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9 
+MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy +NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G +A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA +A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0 +Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s +QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV +eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795 +B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh +z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T +AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i +ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w +TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH +MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD +VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE +VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh +bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B +AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM +bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi +ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG +VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c +ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/ +AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END 
CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 1 +# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16 +# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f +# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea +-----BEGIN CERTIFICATE----- +MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE +FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j +ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js +LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM +BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0 +Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy +dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh +cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh +YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg +dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp +bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ +YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT +TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ +9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8 +jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW +FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz +ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1 +ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L +EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu +L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq +yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC +O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V +um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh +NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14= +-----END CERTIFICATE----- + +# Issuer: O=Government Root Certification Authority +# Subject: O=Government Root Certification Authority +# Label: "Taiwan GRCA" +# Serial: 42023070807708724159991140556527066870 +# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e +# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 +# SHA256 Fingerprint: 
76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ +MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow +PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR +IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q +gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy +yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts +F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 +jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx +ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC +VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK +YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH +EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN +Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud +DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE +MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK +UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ +TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf +qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK +ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE +JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 +hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 +EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm +nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX +udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz +ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe +LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl +pYYsfPQS +-----END CERTIFICATE----- + +# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services +# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services +# Label: "Swisscom Root CA 1" +# Serial: 122348795730808398873664200247279986742 +# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9 +# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51 +# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e +-----BEGIN CERTIFICATE----- +MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk +MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0 +YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg +Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT +AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp +Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9 +m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih +FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/ +TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F +EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco +kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu +HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF +vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo 
+19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC +L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW +bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX +JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw +FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j +BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc +K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf +ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik +Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB +sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e +3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR +ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip +mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH +b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf +rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms +hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y +zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6 +MBr1mmz0DlP5OlvRHA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 
43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=Class 2 Primary CA O=Certplus +# Subject: CN=Class 2 Primary CA O=Certplus +# Label: "Certplus Class 2 Primary CA" +# Serial: 
177770208045934040241468760488327595043 +# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b +# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb +# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw +PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz +cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 +MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz +IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ +ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR +VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL +kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd +EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas +H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 +HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud +DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 +QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu +Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ +AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 +yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR +FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA +ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB +kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 +l7+ijrRU +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES +# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES +# Label: "DST ACES CA X6" +# Serial: 17771143917277623872238992636097467865 +# MD5 Fingerprint: 
21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8 +# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d +# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40 +-----BEGIN CERTIFICATE----- +MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx +ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w +MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD +VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx +FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu +ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7 +gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH +fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a +ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT +ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk +c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto +dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt +aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI +hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk +QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/ +h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq +nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR +rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2 +9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis= +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
+# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 +# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 +# Label: "Security Communication EV RootCA1" +# Serial: 0 +# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3 +# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d +# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37 +-----BEGIN CERTIFICATE----- +MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz +MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N +IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11 +bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE +RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO +zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5 +bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF +MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1 +VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC +OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G +CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW +tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ +q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb +EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+ +Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O +VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490 +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GA CA" +# Serial: 86718877871133159090080555911823548314 +# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 +# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 +# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 +-----BEGIN CERTIFICATE----- +MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB +ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly +aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl +ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w +NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G +A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD 
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX +SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR +VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 +w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF +mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg +4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 +4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw +EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx +SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 +ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 +vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa +hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi +Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ +/L7fCg0= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Label: "Deutsche Telekom Root CA 2" +# Serial: 38 +# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 +# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf +# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 +-----BEGIN CERTIFICATE----- +MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc +MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj +IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB 
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE +RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl +U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 +IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU +ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC +QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr +rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S +NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc +QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH +txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP +BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC +AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp +tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa +IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl +6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ +xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU +Cm26OWMohpLzGITY+9HPBVZkVw== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: CN=T\xdcB\u0130TAK UEKAE K\xf6k Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 - S\xfcr\xfcm 3 O=T\xfcrkiye Bilimsel ve Teknolojik Ara\u015ft\u0131rma Kurumu - T\xdcB\u0130TAK OU=Ulusal Elektronik ve Kriptoloji Ara\u015ft\u0131rma Enstit\xfcs\xfc - UEKAE/Kamu Sertifikasyon Merkezi +# Subject: CN=T\xdcB\u0130TAK UEKAE K\xf6k Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 - S\xfcr\xfcm 3 O=T\xfcrkiye Bilimsel ve Teknolojik Ara\u015ft\u0131rma Kurumu - T\xdcB\u0130TAK OU=Ulusal Elektronik ve Kriptoloji Ara\u015ft\u0131rma Enstit\xfcs\xfc - UEKAE/Kamu Sertifikasyon Merkezi +# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3" +# Serial: 17 +# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26 +# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96 +# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a +-----BEGIN CERTIFICATE----- 
+MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS +MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp +bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw +VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy +YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy +dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2 +ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe +Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx +GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls +aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU +QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh +xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0 +aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr +IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB +IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h +gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK +O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO +fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw +lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL +hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID +AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP +NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t +wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM +7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh +gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n +oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs +yZyQ2uypQjyttgI= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=CNNIC ROOT O=CNNIC +# Subject: CN=CNNIC ROOT O=CNNIC +# Label: "CNNIC ROOT" +# Serial: 1228079105 +# MD5 
Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19 +# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f +# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7 +-----BEGIN CERTIFICATE----- +MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD +TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2 +MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF +Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB +DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh +IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6 +dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO +V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC +GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN +v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB +AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB +Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO +76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK +OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH +ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi +yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL +buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj +2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB +AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G 
+spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM +m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj +aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G2" +# Serial: 10000012 +# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a +# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 +# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f +-----BEGIN CERTIFICATE----- +MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX +DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 +qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp +uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU +Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE +pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp +5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M +UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN +GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy +5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv +6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK +eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 +B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov +L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG +SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS +CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen +5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 +IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK +gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL ++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL +vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm +bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk +N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC +Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z +ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI +# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI +# Label: "ACEDICOM Root" +# Serial: 7029493972724711941 +# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6 +# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84 +# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a +-----BEGIN CERTIFICATE----- +MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE +AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x +CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW +MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF +RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC +AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7 +09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7 +XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P +Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK +t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb +X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28 +MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU +fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI +2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH +K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae +ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP +BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ +MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw +RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv +bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm +fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3 +gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe +I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i 
+5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi +ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn +MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ +o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6 +zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN +GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt +r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK +Z05phkOTOPu220+DkdRgfks+KzgHVZhepA== +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm 
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Chambers of Commerce Root - 2008" +# Serial: 11806822484801597146 +# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 +# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c +# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz +IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz +MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj +dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw +EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp +MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 +28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq +VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q +DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR +5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL +ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a +Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl +UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s ++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 +Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx +hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV +HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 ++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN +YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t +L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy +ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt +IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV +HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w +DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW +PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF +5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 +glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH +FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 +pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD +xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG +tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq +jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De +fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ +d0jQ +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Global Chambersign Root - 2008" +# Serial: 14541511773111788494 +# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 +# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c +# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD +aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx +MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy +cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG +A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl +BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed +KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 +G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 +zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 +ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG +HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 +Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V +yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e +beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r +6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog +zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW +BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr +ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp +ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk +cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt +YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC +CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow +KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI +hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ +UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz +X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x +fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz +a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd +Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd +SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O +AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso +M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge +v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=Certinomis - Autorit\xe9 Racine O=Certinomis 
OU=0002 433998903 +# Subject: CN=Certinomis - Autorit\xe9 Racine O=Certinomis OU=0002 433998903 +# Label: "Certinomis - Autorit\xe9 Racine" +# Serial: 1 +# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a +# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3 +# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17 +-----BEGIN CERTIFICATE----- +MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET +MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk +BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4 +Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl +cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0 +aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY +F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N +8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe +rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K +/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu +7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC +28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6 +lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E +nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB +0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09 +5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj +WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN +jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ +KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s +ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM +OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q +619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn +2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj +o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v +nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG +5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq +pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb +dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0 +BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX 
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: O=Trustis Limited OU=Trustis FPS Root CA +# Subject: O=Trustis Limited OU=Trustis FPS Root CA +# Label: "Trustis FPS Root CA" +# Serial: 36053640375399034304724988975563710553 +# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d +# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 +# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d +-----BEGIN CERTIFICATE----- +MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL +ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx +MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc +MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ +AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH +iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj +vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA +0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB +OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ +BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E +FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 +GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW +zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 +1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE +f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F +jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN +ZetX2fNXlrtIzYE= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 45 +# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 +# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 +# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 +-----BEGIN CERTIFICATE----- +MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul +F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC +ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w +ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk +aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 +YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg +c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 +d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG +CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF +wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS +Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst +0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc +pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl +CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF +P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK +1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm +KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE +JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ +8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm +fyWl8kgAwKQB2j8= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. +# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. 
+# Label: "StartCom Certification Authority G2" +# Serial: 59 +# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 +# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 +# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 +-----BEGIN CERTIFICATE----- +MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 +OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG +A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ +JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD +vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo +D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ +Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW +RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK +HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN +nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM +0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i +UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 +Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg +TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL +BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K +2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX +UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl +6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK +9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ +HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI +wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY +XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l +IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo +hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr +so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr 
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot 
Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Label: "EE Certification Centre Root CA" +# Serial: 112324828676200291871926431888494945866 +# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f +# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 +# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 +-----BEGIN CERTIFICATE----- +MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 +MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 +czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG +CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy +MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl +ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS +b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy +euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO +bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw +WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d +MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE +1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ +zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB +BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF +BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV 
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG +E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u +uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW +iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v +GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= +-----END CERTIFICATE----- + +# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. (c) Aral\u0131k 2007 +# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. (c) Aral\u0131k 2007 +# Label: "TURKTRUST Certificate Services Provider Root 2007" +# Serial: 1 +# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72 +# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33 +# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50 +-----BEGIN CERTIFICATE----- +MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc +UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx +c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS +S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg +SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx +OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry +b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC +VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE +sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F +ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY +KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG ++7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG +HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P +IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M +733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk +Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G +CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW +AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I +aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5 +mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa +XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ +qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica +# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT +# Label: "PSCProcert" +# Serial: 11 +# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec +# SHA1 
Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74 +# SHA256 Fingerprint: 3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0 +-----BEGIN CERTIFICATE----- +MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1 +dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s +YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz +dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0 +aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh +IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ +KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw +MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy +b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx +KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG +A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u +aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9 +7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74 +BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G +ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9 +JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0 +PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2 +0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH +0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/ +6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m +v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7 +K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev +bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw +MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w +MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD +gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0 +b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh +bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0 +cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp +ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg +ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq +hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD +AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w +MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag +RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t +UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl +cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v +Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG +AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN +AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS +1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB +3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv +Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh +HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm +pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz +sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE +qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb +mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9 +opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H 
+YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km +-----END CERTIFICATE----- + +# Issuer: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center +# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center +# Label: "China Internet Network Information Center EV Certificates Root" +# Serial: 1218379777 +# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15 +# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e +# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7 +-----BEGIN CERTIFICATE----- +MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC +Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g +Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0 +aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa +Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg +SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo +aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp +ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z +7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA// +DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx +zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8 +hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs +4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u +gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY +NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E +FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3 +j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG +52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB +echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws +ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI +zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy +wy39FCqQmbkHzJ8= +-----END CERTIFICATE----- + +# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services +# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services +# Label: "Swisscom Root CA 2" +# Serial: 40698052477090394928831521023204026294 +# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19 +# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec +# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41 +-----BEGIN CERTIFICATE----- +MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk +MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0 +YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg +Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT +AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp +Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr +jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r +0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f +2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP +ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF +y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA +tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL 
+6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0 +uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL +acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh +k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q +VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw +FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O +BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh +b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R +fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv +/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI +REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx +srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv +aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT +woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n +Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W +t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N +8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2 +9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5 +wSsSnqaeG8XmDtkx2Q== +-----END CERTIFICATE----- + +# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services +# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services +# Label: "Swisscom Root EV CA 2" +# Serial: 322973295377129385374608406479535262296 +# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec +# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b +# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d +-----BEGIN CERTIFICATE----- +MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw +ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp +dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290 +IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD +VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy +dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg +MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx +UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD +1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH +oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR +HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/ +5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv +idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL +OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC +NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f +46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB +UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth +7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G +A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED +MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB +bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x +XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T +PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0 +Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70 +WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL +Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm 
+7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S +nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN +vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB +WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI +fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb +I+2ksx0WckNLIOFZfsLorSa/ovc= +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R1 O=Disig a.s. +# Subject: CN=CA Disig Root R1 O=Disig a.s. +# Label: "CA Disig Root R1" +# Serial: 14052245610670616104 +# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a +# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6 +# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy +MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk +D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o +OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A +fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe +IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n +oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK +/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj +rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD +3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE +7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC +yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd +qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI +hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR +xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA +SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo +HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB +emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC +AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb +7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x +DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk +F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF +a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT +Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. 
+# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm 
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. 
OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy 
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM 
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga 
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- 
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO 
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited +# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited +# Label: "WoSign" +# Serial: 125491772294754854453622855443212256657 +# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d +# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb +# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08 +-----BEGIN CERTIFICATE----- +MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV +MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV +BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw +MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX +b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN +rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U +fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc +f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2 +ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M +x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR +aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch +zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar +uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K +mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA +Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv +HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H +EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1 +LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ +MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e +JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN +g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp +dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab +R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ +PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce 
+xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+ +J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl +OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT +ee5Ehr7XHuQe+w== +-----END CERTIFICATE----- + +# Issuer: CN=CA \u6c83\u901a\u6839\u8bc1\u4e66 O=WoSign CA Limited +# Subject: CN=CA \u6c83\u901a\u6839\u8bc1\u4e66 O=WoSign CA Limited +# Label: "WoSign China" +# Serial: 106921963437422998931660691310149453965 +# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93 +# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6 +# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54 +-----BEGIN CERTIFICATE----- +MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG +MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV +BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw +MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl +ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r +D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1 +9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf +v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk +UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L +NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb ++gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V +qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K +yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G +AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK +J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC +AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4 +WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6 +yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj +/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6 +jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2 +ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX +X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n +FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D +u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l +O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le +ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1 +2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT 
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX 
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G3" +# Serial: 10003001 +# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 +# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc +# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX +DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP +cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW +IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX +xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy +KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR +9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az +5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 +6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 +Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP +bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt +BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt +XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd +INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp +LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 +Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp +gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh +/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw 
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A +fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq +4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR +1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ +QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM +94B7IWcnMFk= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw 
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. +# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. +# Label: "T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5" +# Serial: 156233699172481 +# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e +# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb +# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78 +-----BEGIN CERTIFICATE----- +MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE +BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn +aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg +QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg +SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0 +MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD +VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 +dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF +bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB +IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom +/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR +Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3 +4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z +5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0 +hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID +AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX +SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l +VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq +URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf +peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF +Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW ++qtB4Uu2NQvAmxU= +-----END CERTIFICATE----- + +# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Label: "Certinomis - Root CA" +# Serial: 1 +# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f +# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 +# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 +-----BEGIN CERTIFICATE----- +MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET +MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb +BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz +MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx 
+FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g +Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 +fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl +LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV +WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF +TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb +5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc +CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri +wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ +wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG +m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 +F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng +WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 +2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF +AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ +0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw +F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS +g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj +qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN +h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ +ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V +btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj +Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ +8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW +gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic 
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=Certification Authority of WoSign G2 O=WoSign CA Limited +# Subject: CN=Certification Authority of WoSign G2 O=WoSign CA Limited +# Label: "Certification Authority of WoSign G2" +# Serial: 142423943073812161787490648904721057092 +# MD5 Fingerprint: c8:1c:7d:19:aa:cb:71:93:f2:50:f8:52:a8:1e:ba:60 +# SHA1 Fingerprint: fb:ed:dc:90:65:b7:27:20:37:bc:55:0c:9c:56:de:bb:f2:78:94:e1 +# SHA256 Fingerprint: d4:87:a5:6f:83:b0:74:82:e8:5e:96:33:94:c1:ec:c2:c9:e5:1d:09:03:ee:94:6b:02:c3:01:58:1e:d9:9e:16 +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQayXaioidfLwPBbOxemFFRDANBgkqhkiG9w0BAQsFADBY +MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxLTArBgNV +BAMTJENlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbiBHMjAeFw0xNDEx +MDgwMDU4NThaFw00NDExMDgwMDU4NThaMFgxCzAJBgNVBAYTAkNOMRowGAYDVQQK +ExFXb1NpZ24gQ0EgTGltaXRlZDEtMCsGA1UEAxMkQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkgb2YgV29TaWduIEcyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvsXEoCKASU+/2YcRxlPhuw+9YH+v9oIOH9ywjj2X4FA8jzrvZjtFB5sg+OPX +JYY1kBaiXW8wGQiHC38Gsp1ij96vkqVg1CuAmlI/9ZqD6TRay9nVYlzmDuDfBpgO +gHzKtB0TiGsOqCR3A9DuW/PKaZE1OVbFbeP3PU9ekzgkyhjpJMuSA93MHD0JcOQg +5PGurLtzaaNjOg9FD6FKmsLRY6zLEPg95k4ot+vElbGs/V6r+kHLXZ1L3PR8du9n +fwB6jdKgGlxNIuG12t12s9R23164i5jIFFTMaxeSt+BKv0mUYQs4kI9dJGwlezt5 +2eJ+na2fmKEG/HgUYFf47oB3sQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU+mCp62XF3RYUCE4MD42b4Pdkr2cwDQYJ +KoZIhvcNAQELBQADggEBAFfDejaCnI2Y4qtAqkePx6db7XznPWZaOzG73/MWM5H8 +fHulwqZm46qwtyeYP0nXYGdnPzZPSsvxFPpahygc7Y9BMsaV+X3avXtbwrAh449G +3CE4Q3RM+zD4F3LBMvzIkRfEzFg3TgvMWvchNSiDbGAtROtSjFA9tWwS1/oJu2yy +SrHFieT801LYYRf+epSEj3m2M1m6D8QL4nCgS3gu+sif/a+RZQp4OBXllxcU3fng +LDT4ONCEIgDAFFEYKwLcMFrw6AF8NTojrwjkr6qOKEJJLvD1mTS+7Q9LGOHSJDy7 +XUe3IfKN0QqZjuNuPq1w4I+5ysxugTH2e5x6eeRncRg= +-----END CERTIFICATE----- + +# Issuer: CN=CA WoSign ECC Root O=WoSign CA Limited +# Subject: CN=CA WoSign ECC Root O=WoSign CA Limited +# Label: "CA WoSign ECC Root" +# Serial: 138625735294506723296996289575837012112 +# MD5 Fingerprint: 80:c6:53:ee:61:82:28:72:f0:ff:21:b9:17:ca:b2:20 +# SHA1 Fingerprint: d2:7a:d2:be:ed:94:c0:a1:3c:c7:25:21:ea:5d:71:be:81:19:f3:2b +# SHA256 Fingerprint: 8b:45:da:1c:06:f7:91:eb:0c:ab:f2:6b:e5:88:f5:fb:23:16:5c:2e:61:4b:f8:85:56:2d:0d:ce:50:b2:9b:02 +-----BEGIN CERTIFICATE----- +MIICCTCCAY+gAwIBAgIQaEpYcIBr8I8C+vbe6LCQkDAKBggqhkjOPQQDAzBGMQsw +CQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNVBAMT +EkNBIFdvU2lnbiBFQ0MgUm9vdDAeFw0xNDExMDgwMDU4NThaFw00NDExMDgwMDU4 +NThaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRlZDEb +MBkGA1UEAxMSQ0EgV29TaWduIEVDQyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACID +YgAE4f2OuEMkq5Z7hcK6C62N4DrjJLnSsb6IOsq/Srj57ywvr1FQPEd1bPiUt5v8 +KB7FVMxjnRZLU8HnIKvNrCXSf4/CwVqCXjCLelTOA7WRf6qU0NGKSMyCBSah1VES +1ns2o0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUqv3VWqP2h4syhf3RMluARZPzA7gwCgYIKoZIzj0EAwMDaAAwZQIxAOSkhLCB +1T2wdKyUpOgOPQB0TKGXa/kNUTyh2Tv0Daupn75OcsqF1NnstTJFGG+rrQIwfcf3 +aWMvoeGY7xMQ0Xk/0f7qO3/eVvSQsRUR2LIiFdAvwyYua/GRspBl9JrmkO5K +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G1 O=Certplus +# Subject: CN=Certplus Root CA G1 O=Certplus +# Label: "Certplus Root CA G1" +# Serial: 1491911565779898356709731176965615564637713 +# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42 +# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66 +# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a +iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt +6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP +0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f +6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE +EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN +1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc +h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT +mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV +4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO +WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud +DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd +Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq +hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh +66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7 +/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS +S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j +2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R +Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr +RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy 
+6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV +V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5 +g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl +++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G2 O=Certplus +# Subject: CN=Certplus Root CA G2 O=Certplus +# Label: "Certplus Root CA G2" +# Serial: 1492087096131536844209563509228951875861589 +# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31 +# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a +# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17 +-----BEGIN CERTIFICATE----- +MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat +93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x +Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P +AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj +FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG +SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch +p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal +U5ORGpOucGpnutee5WEaXw== +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust +# Subject: CN=OpenTrust Root CA G1 O=OpenTrust +# Label: "OpenTrust Root CA G1" +# Serial: 1492036577811947013770400127034825178844775 +# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da +# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e +# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b +wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX +/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0 +77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP +uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx +p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx +Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2 +TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W +G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw +vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY +EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1 +2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw +DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E +PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf +gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS +FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0 +V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P 
+XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I +i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t +TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 +09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky +Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ +AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj +1oxx +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust +# Subject: CN=OpenTrust Root CA G2 O=OpenTrust +# Label: "OpenTrust Root CA G2" +# Serial: 1492012448042702096986875987676935573415441 +# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb +# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b +# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh +/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e +CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6 +1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE +FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS +gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X +G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy +YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH +vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4 +t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/ +gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3 +5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w +DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz +Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0 +nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT +RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT +wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2 +t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa +TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 +o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU +3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA +iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f +WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM +S1IK +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust +# Subject: CN=OpenTrust Root CA G3 O=OpenTrust +# Label: "OpenTrust Root CA G3" +# Serial: 1492104908271485653071219941864171170455615 +# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24 +# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6 +# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92 +-----BEGIN CERTIFICATE----- +MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx +CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U +cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow 
+QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl +blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm +3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d +oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G +A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5 +DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK +BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q +j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx +4nxp5V2a+EEfOzmTk51V6s2N8fvB +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc +oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx 
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj +ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- 
+ +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: 
"Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Label: "LuxTrust Global Root 2" +# Serial: 59914338225734147123941058376788110305822489521 +# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c +# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f +# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 +-----BEGIN CERTIFICATE----- +MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL +BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV +BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw +MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B +LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F +ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem +hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 +EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn +Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 +zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ +96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m +j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g +DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ +8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j +X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH +hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB +KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 +Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT ++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL +BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 +BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO +jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 +loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c +qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ +2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ +JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre +zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf +LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ 
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 +oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr +N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- diff --git a/RBXLegacyDiscordBot/lib/certifi/core.py b/RBXLegacyDiscordBot/lib/certifi/core.py new file mode 100644 index 0000000..e7ae2ee --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi/core.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem. +""" +import os +import warnings + + +class DeprecatedBundleWarning(DeprecationWarning): + """ + The weak security bundle is being deprecated. Please bother your service + provider to get them to stop using cross-signed roots. 
+ """ + + +def where(): + f = os.path.split(__file__)[0] + + return os.path.join(f, 'cacert.pem') + + +def old_where(): + warnings.warn( + "The weak security bundle is being deprecated.", + DeprecatedBundleWarning + ) + f = os.path.split(__file__)[0] + return os.path.join(f, 'weak.pem') + +if __name__ == '__main__': + print(where()) diff --git a/RBXLegacyDiscordBot/lib/certifi/old_root.pem b/RBXLegacyDiscordBot/lib/certifi/old_root.pem new file mode 100644 index 0000000..af30ea7 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi/old_root.pem @@ -0,0 +1,414 @@ +# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Secure Server CA" +# Serial: 927650371 +# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee +# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 +# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC +VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u +ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc +KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u +ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 +MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE +ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j +b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg +U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ +I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 +wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC +AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb +oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 +BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p +dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk +MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp +b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 +MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi +E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa +MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI +hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN +95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd +2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority +# Label: "ValiCert Class 2 VA" +# Serial: 1 +# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 +# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 +# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy +NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY +dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 +WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS +v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v +UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu +IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC +W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Label: "NetLock Express (Class C) Root" +# Serial: 104 +# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4 +# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b +# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f +-----BEGIN CERTIFICATE----- +MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx +ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 +b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD +EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X +DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw +DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u +c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr +TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN +BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA +OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC +2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW +RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P +AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW +ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0 +YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz +b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO +ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB +IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs +b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs +ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s +YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg +a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g +SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0 
+aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg +YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg +Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY +ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g +pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4 +Fp1hBWeAyNDYpQcCNJgEjTME1A== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Label: "NetLock Business (Class B) Root" +# Serial: 105 +# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6 +# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af +# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12 +-----BEGIN CERTIFICATE----- +MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx +ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 +b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD +EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05 +OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G +A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh +Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l +dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG +SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK +gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX +iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc +Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E +BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G +SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu +b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh +bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv +Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln +aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0 +IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh +c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph +biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo +ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP +UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj +YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo +dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA +bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06 +sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa +n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS +NitjrFgBazMpUIaD8QFI +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority +# Label: "RSA Root Certificate 1" +# Serial: 1 +# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 +# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb +# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy +NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD +cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs +2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY +JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE +Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ +n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A +PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Label: "ValiCert Class 1 VA" +# Serial: 1 +# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb +# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e +# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy +NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y +LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ +TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y +TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 +LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW +I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw +nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. 
+# Label: "Equifax Secure eBusiness CA 1" +# Serial: 4 +# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d +# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 +# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT +ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw +MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j +LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ +KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo +RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu +WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw +Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK +eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM +zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ +WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN +/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Label: "Equifax Secure Global eBusiness CA" +# Serial: 1 +# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc +# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 +# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT +ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw +MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj +dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l +c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC +UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc +58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ +o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr +aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA +A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA +Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv +8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Premium Server CA" +# Serial: 1 +# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a +# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a +# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy 
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t +MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB +MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG +A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl +cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE +VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ +ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR +uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG +9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI +hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM +pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Server CA" +# Serial: 1 +# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d +# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c +# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm +MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx +MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 +dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl +cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 +DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD +gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 +yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX +L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj +EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG +7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e +QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ +qdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. 
OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 149843929435818692848040365716851702463 +# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67 +# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2 +# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do +lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc +AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 80507572722862485515306429940691309246 +# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4 +# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b +# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i +2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ +2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. 
- For authorized use only/VeriSign Trust Network +# Label: "Verisign Class 3 Public Primary Certification Authority - G2" +# Serial: 167285380242319648451154478808036881606 +# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 +# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f +# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 +pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 +13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk +U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i +F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY +oJ2daZH9 +-----END CERTIFICATE----- + +# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Label: "GTE CyberTrust Global Root" +# Serial: 421 +# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db +# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 +# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 +-----BEGIN CERTIFICATE----- +MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD +VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv +bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv +b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV +UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU +cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds +b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH +iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS +r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 +04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r +GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 +3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P +lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ +-----END CERTIFICATE----- + +# Issuer: C=US, O=Equifax, OU=Equifax Secure Certificate Authority +# Subject: C=US, O=Equifax, OU=Equifax Secure Certificate Authority +# Label: "Equifax Secure Certificate Authority" +# Serial: 903804111 +# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 +# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a +# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV 
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy +dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 +MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx +dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f +BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A +cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ +MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm +aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw +ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj +IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y +7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh +1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 +-----END CERTIFICATE----- diff --git a/RBXLegacyDiscordBot/lib/certifi/weak.pem b/RBXLegacyDiscordBot/lib/certifi/weak.pem new file mode 100644 index 0000000..7691c07 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/certifi/weak.pem @@ -0,0 +1,5660 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- 
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt 
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye 
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Low-Value Services Root" +# Serial: 1 +# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc +# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d +# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 +-----BEGIN CERTIFICATE----- +MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw +MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD +VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA +A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul +CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n +tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl +dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch +PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC ++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O +BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl +MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk +ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB +IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X +7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz +43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY +eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl +pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA +WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 +MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt 
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD +VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Public Services Root" +# Serial: 1 +# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f +# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 +# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx +MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB +ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV +BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV +6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX +GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP +dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH +1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF +62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW +BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL +MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU +cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv +b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 +IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ +iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao +GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh +4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm +XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Qualified Certificates Root" +# Serial: 1 +# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb +# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf +# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 +-----BEGIN CERTIFICATE----- 
+MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 +MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK +EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh +BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq +xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G +87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i +2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U +WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 +0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G +A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr +pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL +ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm +aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv +hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm +hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X +dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 +P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y +iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no +xqE= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua 
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. +# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA 2" +# Serial: 1 +# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 +# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d +# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 +-----BEGIN CERTIFICATE----- +MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs +IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg +R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A +PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 +Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL +TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL +5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 +S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe +2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap +EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td +EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv +/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN +A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 +abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF +I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz +4iIprn2DQKi6bA== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW +koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Label: "Visa eCommerce Root" +# Serial: 25952180776285836048024890241505565794 +# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 +# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 +# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 +-----BEGIN CERTIFICATE----- +MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr +MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl 
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv +bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw +CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h +dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l +cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h +2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E +lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV +ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq +299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t +vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL +dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF +AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR +zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 +LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd +7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw +++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt +398znM/jra6O1I7mT1GvFpLgXPYHDw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum CA O=Unizeto Sp. z o.o. +# Subject: CN=Certum CA O=Unizeto Sp. z o.o. +# Label: "Certum Root CA" +# Serial: 65568 +# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9 +# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18 +# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24 +-----BEGIN CERTIFICATE----- +MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM +MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD +QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM +MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD +QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E +jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo +ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI +ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu +Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg +AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7 +HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA +uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa +TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg +xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q +CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x +O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs +6GAqm4VKQPNriiTsBhYscw== +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE 
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=Secure Certificate Services O=Comodo CA Limited +# Subject: CN=Secure Certificate Services O=Comodo CA Limited +# Label: "Comodo Secure Services root" +# Serial: 1 +# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd +# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 +# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 +-----BEGIN CERTIFICATE----- +MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp +ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow +fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV +BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM +cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S +HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 +CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk +3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz +6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV +HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud +EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv +Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw +Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww +DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 +5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj +Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI +gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ +aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl +izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= +-----END CERTIFICATE----- + +# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited +# Subject: CN=Trusted Certificate Services O=Comodo CA Limited +# Label: "Comodo Trusted Services root" +# Serial: 1 +# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 +# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd +# SHA256 Fingerprint: 
3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 +aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla +MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO +BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD +VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW +fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt +TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL +fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW +1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 +kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G +A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v +ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo +dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu +Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ +HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 +pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS +jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ +xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn +dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Label: "QuoVadis Root CA" +# Serial: 985026699 +# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 +# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 +# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz +MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw +IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR +dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp +li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D +rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ +WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug +F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU +xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC +Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv +dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw +ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl +IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh +c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy +ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI 
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T +KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq +y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p +dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD +VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk +fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 +7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R +cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y +mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW +xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK +SnQ2+Q== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 
1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ 
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=Sonera Class2 CA O=Sonera +# Subject: CN=Sonera Class2 CA O=Sonera +# Label: "Sonera Class 2 Root CA" +# Serial: 29 +# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb +# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 +# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP +MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx +MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV +BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o +Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt +5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s +3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej +vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu +8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw +DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG +MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil +zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ +3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD +FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 +Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 +ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M +-----END CERTIFICATE----- + +# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com +# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com +# Label: "UTN USERFirst Hardware Root CA" +# Serial: 91374294542884704022267039221184531197 +# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 +# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 +# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 +-----BEGIN CERTIFICATE----- +MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB +lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt +SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG +A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe +MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v +d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh +cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn +0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ 
+M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a +MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd +oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI +DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy +oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD +VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 +dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy +bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF +BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM +//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli +CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE +CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t +3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS +KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org +# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org +# Label: "Camerfirma Chambers of Commerce Root" +# Serial: 0 +# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84 +# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1 +# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3 +-----BEGIN CERTIFICATE----- +MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn +MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL +ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg +b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa +MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB +ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw +IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B +AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb +unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d +BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq +7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3 +0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX +roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG +A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j +aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p +26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA +BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud +EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN +BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz +aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB +AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd +p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi +1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc +XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0 +eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu +tGWaIZDgqtCYvDi1czyL+Nw= +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org +# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org +# Label: "Camerfirma Global Chambersign Root" +# Serial: 0 +# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19 +# SHA1 
Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9 +# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed +-----BEGIN CERTIFICATE----- +MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn +MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL +ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo +YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9 +MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy +NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G +A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA +A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0 +Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s +QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV +eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795 +B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh +z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T +AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i +ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w +TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH +MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD +VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE +VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh +bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B +AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM +bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi +ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG +VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c +ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/ +AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs 
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 1 +# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16 +# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f +# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea +-----BEGIN CERTIFICATE----- +MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE +FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j +ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js +LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM +BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0 +Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy +dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh +cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh +YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg +dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp +bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ +YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT +TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ +9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8 +jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW +FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz +ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1 +ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L +EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu +L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq +yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC +O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V +um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh +NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14= +-----END CERTIFICATE----- + +# Issuer: O=Government Root Certification Authority +# Subject: O=Government Root Certification Authority +# Label: "Taiwan GRCA" +# Serial: 42023070807708724159991140556527066870 +# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e +# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 +# SHA256 Fingerprint: 
76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ +MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow +PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR +IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q +gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy +yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts +F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 +jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx +ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC +VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK +YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH +EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN +Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud +DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE +MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK +UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ +TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf +qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK +ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE +JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 +hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 +EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm +nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX +udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz +ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe +LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl +pYYsfPQS +-----END CERTIFICATE----- + +# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services +# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services +# Label: "Swisscom Root CA 1" +# Serial: 122348795730808398873664200247279986742 +# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9 +# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51 +# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e +-----BEGIN CERTIFICATE----- +MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk +MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0 +YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg +Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT +AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp +Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9 +m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih +FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/ +TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F +EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco +kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu +HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF +vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo 
+19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC +L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW +bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX +JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw +FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j +BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc +K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf +ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik +Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB +sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e +3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR +ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip +mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH +b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf +rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms +hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y +zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6 +MBr1mmz0DlP5OlvRHA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 
43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=Class 2 Primary CA O=Certplus +# Subject: CN=Class 2 Primary CA O=Certplus +# Label: "Certplus Class 2 Primary CA" +# Serial: 
177770208045934040241468760488327595043 +# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b +# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb +# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw +PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz +cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 +MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz +IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ +ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR +VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL +kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd +EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas +H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 +HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud +DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 +QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu +Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ +AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 +yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR +FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA +ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB +kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 +l7+ijrRU +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES +# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES +# Label: "DST ACES CA X6" +# Serial: 17771143917277623872238992636097467865 +# MD5 Fingerprint: 
21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8 +# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d +# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40 +-----BEGIN CERTIFICATE----- +MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx +ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w +MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD +VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx +FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu +ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7 +gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH +fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a +ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT +ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk +c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto +dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt +aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI +hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk +QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/ +h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq +nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR +rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2 +9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis= +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
+# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 +# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 +# Label: "Security Communication EV RootCA1" +# Serial: 0 +# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3 +# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d +# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37 +-----BEGIN CERTIFICATE----- +MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz +MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N +IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11 +bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE +RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO +zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5 +bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF +MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1 +VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC +OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G +CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW +tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ +q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb +EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+ +Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O +VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490 +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GA CA" +# Serial: 86718877871133159090080555911823548314 +# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 +# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 +# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 +-----BEGIN CERTIFICATE----- +MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB +ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly +aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl +ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w +NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G +A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD 
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX +SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR +VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 +w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF +mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg +4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 +4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw +EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx +SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 +ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 +vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa +hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi +Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ +/L7fCg0= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Label: "Deutsche Telekom Root CA 2" +# Serial: 38 +# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 +# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf +# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 +-----BEGIN CERTIFICATE----- +MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc +MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj +IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB 
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE +RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl +U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 +IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU +ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC +QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr +rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S +NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc +QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH +txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP +BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC +AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp +tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa +IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl +6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ +xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU +Cm26OWMohpLzGITY+9HPBVZkVw== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: CN=T\xdcB\u0130TAK UEKAE K\xf6k Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 - S\xfcr\xfcm 3 O=T\xfcrkiye Bilimsel ve Teknolojik Ara\u015ft\u0131rma Kurumu - T\xdcB\u0130TAK OU=Ulusal Elektronik ve Kriptoloji Ara\u015ft\u0131rma Enstit\xfcs\xfc - UEKAE/Kamu Sertifikasyon Merkezi +# Subject: CN=T\xdcB\u0130TAK UEKAE K\xf6k Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 - S\xfcr\xfcm 3 O=T\xfcrkiye Bilimsel ve Teknolojik Ara\u015ft\u0131rma Kurumu - T\xdcB\u0130TAK OU=Ulusal Elektronik ve Kriptoloji Ara\u015ft\u0131rma Enstit\xfcs\xfc - UEKAE/Kamu Sertifikasyon Merkezi +# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3" +# Serial: 17 +# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26 +# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96 +# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a +-----BEGIN CERTIFICATE----- 
+MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS +MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp +bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw +VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy +YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy +dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2 +ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe +Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx +GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls +aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU +QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh +xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0 +aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr +IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB +IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h +gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK +O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO +fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw +lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL +hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID +AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP +NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t +wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM +7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh +gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n +oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs +yZyQ2uypQjyttgI= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=CNNIC ROOT O=CNNIC +# Subject: CN=CNNIC ROOT O=CNNIC +# Label: "CNNIC ROOT" +# Serial: 1228079105 +# MD5 
Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19 +# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f +# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7 +-----BEGIN CERTIFICATE----- +MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD +TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2 +MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF +Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB +DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh +IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6 +dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO +V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC +GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN +v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB +AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB +Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO +76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK +OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH +ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi +yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL +buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj +2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB +AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G 
+spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM +m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj +aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G2" +# Serial: 10000012 +# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a +# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 +# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f +-----BEGIN CERTIFICATE----- +MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX +DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 +qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp +uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU +Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE +pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp +5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M +UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN +GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy +5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv +6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK +eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 +B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov +L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG +SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS +CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen +5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 +IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK +gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL ++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL +vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm +bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk +N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC +Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z +ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI +# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI +# Label: "ACEDICOM Root" +# Serial: 7029493972724711941 +# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6 +# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84 +# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a +-----BEGIN CERTIFICATE----- +MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE +AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x +CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW +MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF +RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC +AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7 +09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7 +XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P +Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK +t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb +X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28 +MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU +fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI +2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH +K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae +ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP +BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ +MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw +RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv +bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm +fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3 +gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe +I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i 
+5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi +ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn +MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ +o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6 +zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN +GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt +r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK +Z05phkOTOPu220+DkdRgfks+KzgHVZhepA== +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm 
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Chambers of Commerce Root - 2008" +# Serial: 11806822484801597146 +# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 +# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c +# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz +IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz +MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj +dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw +EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp +MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 +28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq +VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q +DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR +5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL +ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a +Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl +UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s ++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 +Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx +hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV +HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 ++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN +YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t +L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy +ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt +IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV +HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w +DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW +PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF +5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 +glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH +FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 +pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD +xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG +tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq +jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De +fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ +d0jQ +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Global Chambersign Root - 2008" +# Serial: 14541511773111788494 +# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 +# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c +# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD +aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx +MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy +cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG +A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl +BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed +KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 +G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 +zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 +ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG +HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 +Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V +yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e +beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r +6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog +zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW +BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr +ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp +ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk +cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt +YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC +CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow +KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI +hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ +UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz +X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x +fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz +a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd +Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd +SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O +AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso +M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge +v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=Certinomis - Autorit\xe9 Racine O=Certinomis 
OU=0002 433998903 +# Subject: CN=Certinomis - Autorit\xe9 Racine O=Certinomis OU=0002 433998903 +# Label: "Certinomis - Autorit\xe9 Racine" +# Serial: 1 +# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a +# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3 +# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17 +-----BEGIN CERTIFICATE----- +MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET +MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk +BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4 +Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl +cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0 +aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY +F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N +8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe +rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K +/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu +7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC +28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6 +lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E +nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB +0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09 +5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj +WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN +jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ +KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s +ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM +OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q +619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn +2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj +o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v +nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG +5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq +pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb +dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0 +BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX 
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: O=Trustis Limited OU=Trustis FPS Root CA +# Subject: O=Trustis Limited OU=Trustis FPS Root CA +# Label: "Trustis FPS Root CA" +# Serial: 36053640375399034304724988975563710553 +# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d +# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 +# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d +-----BEGIN CERTIFICATE----- +MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL +ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx +MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc +MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ +AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH +iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj +vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA +0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB +OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ +BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E +FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 +GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW +zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 +1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE +f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F +jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN +ZetX2fNXlrtIzYE= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 45 +# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 +# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 +# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 +-----BEGIN CERTIFICATE----- +MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul +F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC +ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w +ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk +aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 +YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg +c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 +d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG +CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF +wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS +Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst +0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc +pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl +CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF +P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK +1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm +KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE +JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ +8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm +fyWl8kgAwKQB2j8= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. +# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. 
+# Label: "StartCom Certification Authority G2" +# Serial: 59 +# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 +# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 +# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 +-----BEGIN CERTIFICATE----- +MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 +OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG +A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ +JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD +vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo +D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ +Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW +RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK +HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN +nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM +0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i +UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 +Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg +TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL +BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K +2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX +UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl +6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK +9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ +HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI +wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY +XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l +IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo +hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr +so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr 
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot 
Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Label: "EE Certification Centre Root CA" +# Serial: 112324828676200291871926431888494945866 +# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f +# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 +# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 +-----BEGIN CERTIFICATE----- +MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 +MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 +czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG +CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy +MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl +ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS +b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy +euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO +bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw +WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d +MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE +1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ +zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB +BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF +BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV 
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG +E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u +uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW +iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v +GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= +-----END CERTIFICATE----- + +# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. (c) Aral\u0131k 2007 +# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. (c) Aral\u0131k 2007 +# Label: "TURKTRUST Certificate Services Provider Root 2007" +# Serial: 1 +# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72 +# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33 +# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50 +-----BEGIN CERTIFICATE----- +MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc +UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx +c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS +S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg +SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx +OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry +b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC +VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE +sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F +ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY +KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG ++7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG +HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P +IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M +733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk +Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G +CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW +AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I +aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5 +mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa +XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ +qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica +# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT +# Label: "PSCProcert" +# Serial: 11 +# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec +# SHA1 
Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74 +# SHA256 Fingerprint: 3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0 +-----BEGIN CERTIFICATE----- +MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1 +dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s +YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz +dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0 +aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh +IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ +KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw +MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy +b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx +KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG +A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u +aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9 +7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74 +BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G +ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9 +JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0 +PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2 +0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH +0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/ +6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m +v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7 +K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev +bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw +MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w +MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD +gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0 +b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh +bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0 +cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp +ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg +ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq +hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD +AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w +MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag +RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t +UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl +cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v +Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG +AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN +AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS +1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB +3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv +Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh +HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm +pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz +sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE +qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb +mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9 +opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H 
+YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km +-----END CERTIFICATE----- + +# Issuer: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center +# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center +# Label: "China Internet Network Information Center EV Certificates Root" +# Serial: 1218379777 +# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15 +# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e +# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7 +-----BEGIN CERTIFICATE----- +MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC +Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g +Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0 +aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa +Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg +SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo +aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp +ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z +7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA// +DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx +zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8 +hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs +4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u +gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY +NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E +FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3 +j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG +52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB +echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws +ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI +zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy +wy39FCqQmbkHzJ8= +-----END CERTIFICATE----- + +# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services +# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services +# Label: "Swisscom Root CA 2" +# Serial: 40698052477090394928831521023204026294 +# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19 +# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec +# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41 +-----BEGIN CERTIFICATE----- +MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk +MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0 +YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg +Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT +AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp +Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr +jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r +0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f +2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP +ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF +y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA +tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL 
+6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0 +uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL +acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh +k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q +VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw +FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O +BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh +b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R +fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv +/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI +REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx +srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv +aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT +woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n +Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W +t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N +8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2 +9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5 +wSsSnqaeG8XmDtkx2Q== +-----END CERTIFICATE----- + +# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services +# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services +# Label: "Swisscom Root EV CA 2" +# Serial: 322973295377129385374608406479535262296 +# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec +# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b +# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d +-----BEGIN CERTIFICATE----- +MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw +ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp +dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290 +IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD +VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy +dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg +MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx +UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD +1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH +oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR +HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/ +5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv +idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL +OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC +NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f +46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB +UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth +7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G +A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED +MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB +bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x +XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T +PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0 +Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70 +WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL +Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm 
+7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S +nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN +vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB +WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI +fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb +I+2ksx0WckNLIOFZfsLorSa/ovc= +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R1 O=Disig a.s. +# Subject: CN=CA Disig Root R1 O=Disig a.s. +# Label: "CA Disig Root R1" +# Serial: 14052245610670616104 +# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a +# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6 +# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy +MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk +D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o +OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A +fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe +IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n +oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK +/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj +rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD +3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE +7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC +yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd +qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI +hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR +xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA +SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo +HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB +emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC +AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb +7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x +DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk +F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF +a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT +Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. 
+# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm 
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. 
OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy 
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM 
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga 
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- 
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO 
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited +# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited +# Label: "WoSign" +# Serial: 125491772294754854453622855443212256657 +# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d +# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb +# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08 +-----BEGIN CERTIFICATE----- +MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV +MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV +BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw +MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX +b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN +rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U +fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc +f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2 +ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M +x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR +aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch +zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar +uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K +mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA +Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv +HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H +EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1 +LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ +MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e +JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN +g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp +dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab +R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ +PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce 
+xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+ +J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl +OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT +ee5Ehr7XHuQe+w== +-----END CERTIFICATE----- + +# Issuer: CN=CA \u6c83\u901a\u6839\u8bc1\u4e66 O=WoSign CA Limited +# Subject: CN=CA \u6c83\u901a\u6839\u8bc1\u4e66 O=WoSign CA Limited +# Label: "WoSign China" +# Serial: 106921963437422998931660691310149453965 +# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93 +# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6 +# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54 +-----BEGIN CERTIFICATE----- +MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG +MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV +BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw +MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl +ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r +D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1 +9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf +v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk +UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L +NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb ++gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V +qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K +yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G +AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK +J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC +AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4 +WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6 +yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj +/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6 +jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2 +ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX +X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n +FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D +u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l +O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le +ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1 +2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT 
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX 
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G3" +# Serial: 10003001 +# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 +# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc +# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX +DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP +cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW +IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX +xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy +KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR +9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az +5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 +6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 +Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP +bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt +BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt +XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd +INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp +LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 +Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp +gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh +/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw 
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A +fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq +4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR +1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ +QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM +94B7IWcnMFk= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw 
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. +# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. +# Label: "T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5" +# Serial: 156233699172481 +# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e +# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb +# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78 +-----BEGIN CERTIFICATE----- +MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE +BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn +aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg +QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg +SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0 +MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD +VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 +dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF +bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB +IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom +/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR +Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3 +4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z +5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0 +hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID +AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX +SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l +VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq +URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf +peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF +Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW ++qtB4Uu2NQvAmxU= +-----END CERTIFICATE----- + +# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Label: "Certinomis - Root CA" +# Serial: 1 +# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f +# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 +# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 +-----BEGIN CERTIFICATE----- +MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET +MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb +BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz +MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx 
+FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g +Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 +fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl +LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV +WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF +TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb +5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc +CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri +wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ +wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG +m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 +F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng +WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 +2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF +AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ +0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw +F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS +g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj +qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN +h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ +ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V +btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj +Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ +8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW +gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic 
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=Certification Authority of WoSign G2 O=WoSign CA Limited +# Subject: CN=Certification Authority of WoSign G2 O=WoSign CA Limited +# Label: "Certification Authority of WoSign G2" +# Serial: 142423943073812161787490648904721057092 +# MD5 Fingerprint: c8:1c:7d:19:aa:cb:71:93:f2:50:f8:52:a8:1e:ba:60 +# SHA1 Fingerprint: fb:ed:dc:90:65:b7:27:20:37:bc:55:0c:9c:56:de:bb:f2:78:94:e1 +# SHA256 Fingerprint: d4:87:a5:6f:83:b0:74:82:e8:5e:96:33:94:c1:ec:c2:c9:e5:1d:09:03:ee:94:6b:02:c3:01:58:1e:d9:9e:16 +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQayXaioidfLwPBbOxemFFRDANBgkqhkiG9w0BAQsFADBY +MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxLTArBgNV +BAMTJENlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbiBHMjAeFw0xNDEx +MDgwMDU4NThaFw00NDExMDgwMDU4NThaMFgxCzAJBgNVBAYTAkNOMRowGAYDVQQK +ExFXb1NpZ24gQ0EgTGltaXRlZDEtMCsGA1UEAxMkQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkgb2YgV29TaWduIEcyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvsXEoCKASU+/2YcRxlPhuw+9YH+v9oIOH9ywjj2X4FA8jzrvZjtFB5sg+OPX +JYY1kBaiXW8wGQiHC38Gsp1ij96vkqVg1CuAmlI/9ZqD6TRay9nVYlzmDuDfBpgO +gHzKtB0TiGsOqCR3A9DuW/PKaZE1OVbFbeP3PU9ekzgkyhjpJMuSA93MHD0JcOQg +5PGurLtzaaNjOg9FD6FKmsLRY6zLEPg95k4ot+vElbGs/V6r+kHLXZ1L3PR8du9n +fwB6jdKgGlxNIuG12t12s9R23164i5jIFFTMaxeSt+BKv0mUYQs4kI9dJGwlezt5 +2eJ+na2fmKEG/HgUYFf47oB3sQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU+mCp62XF3RYUCE4MD42b4Pdkr2cwDQYJ +KoZIhvcNAQELBQADggEBAFfDejaCnI2Y4qtAqkePx6db7XznPWZaOzG73/MWM5H8 +fHulwqZm46qwtyeYP0nXYGdnPzZPSsvxFPpahygc7Y9BMsaV+X3avXtbwrAh449G +3CE4Q3RM+zD4F3LBMvzIkRfEzFg3TgvMWvchNSiDbGAtROtSjFA9tWwS1/oJu2yy +SrHFieT801LYYRf+epSEj3m2M1m6D8QL4nCgS3gu+sif/a+RZQp4OBXllxcU3fng +LDT4ONCEIgDAFFEYKwLcMFrw6AF8NTojrwjkr6qOKEJJLvD1mTS+7Q9LGOHSJDy7 +XUe3IfKN0QqZjuNuPq1w4I+5ysxugTH2e5x6eeRncRg= +-----END CERTIFICATE----- + +# Issuer: CN=CA WoSign ECC Root O=WoSign CA Limited +# Subject: CN=CA WoSign ECC Root O=WoSign CA Limited +# Label: "CA WoSign ECC Root" +# Serial: 138625735294506723296996289575837012112 +# MD5 Fingerprint: 80:c6:53:ee:61:82:28:72:f0:ff:21:b9:17:ca:b2:20 +# SHA1 Fingerprint: d2:7a:d2:be:ed:94:c0:a1:3c:c7:25:21:ea:5d:71:be:81:19:f3:2b +# SHA256 Fingerprint: 8b:45:da:1c:06:f7:91:eb:0c:ab:f2:6b:e5:88:f5:fb:23:16:5c:2e:61:4b:f8:85:56:2d:0d:ce:50:b2:9b:02 +-----BEGIN CERTIFICATE----- +MIICCTCCAY+gAwIBAgIQaEpYcIBr8I8C+vbe6LCQkDAKBggqhkjOPQQDAzBGMQsw +CQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNVBAMT +EkNBIFdvU2lnbiBFQ0MgUm9vdDAeFw0xNDExMDgwMDU4NThaFw00NDExMDgwMDU4 +NThaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRlZDEb +MBkGA1UEAxMSQ0EgV29TaWduIEVDQyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACID +YgAE4f2OuEMkq5Z7hcK6C62N4DrjJLnSsb6IOsq/Srj57ywvr1FQPEd1bPiUt5v8 +KB7FVMxjnRZLU8HnIKvNrCXSf4/CwVqCXjCLelTOA7WRf6qU0NGKSMyCBSah1VES +1ns2o0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUqv3VWqP2h4syhf3RMluARZPzA7gwCgYIKoZIzj0EAwMDaAAwZQIxAOSkhLCB +1T2wdKyUpOgOPQB0TKGXa/kNUTyh2Tv0Daupn75OcsqF1NnstTJFGG+rrQIwfcf3 +aWMvoeGY7xMQ0Xk/0f7qO3/eVvSQsRUR2LIiFdAvwyYua/GRspBl9JrmkO5K +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G1 O=Certplus +# Subject: CN=Certplus Root CA G1 O=Certplus +# Label: "Certplus Root CA G1" +# Serial: 1491911565779898356709731176965615564637713 +# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42 +# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66 +# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a +iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt +6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP +0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f +6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE +EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN +1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc +h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT +mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV +4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO +WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud +DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd +Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq +hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh +66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7 +/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS +S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j +2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R +Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr +RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy 
+6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV +V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5 +g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl +++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G2 O=Certplus +# Subject: CN=Certplus Root CA G2 O=Certplus +# Label: "Certplus Root CA G2" +# Serial: 1492087096131536844209563509228951875861589 +# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31 +# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a +# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17 +-----BEGIN CERTIFICATE----- +MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat +93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x +Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P +AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj +FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG +SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch +p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal +U5ORGpOucGpnutee5WEaXw== +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust +# Subject: CN=OpenTrust Root CA G1 O=OpenTrust +# Label: "OpenTrust Root CA G1" +# Serial: 1492036577811947013770400127034825178844775 +# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da +# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e +# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b +wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX +/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0 +77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP +uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx +p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx +Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2 +TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W +G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw +vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY +EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1 +2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw +DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E +PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf +gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS +FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0 +V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P 
+XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I +i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t +TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 +09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky +Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ +AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj +1oxx +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust +# Subject: CN=OpenTrust Root CA G2 O=OpenTrust +# Label: "OpenTrust Root CA G2" +# Serial: 1492012448042702096986875987676935573415441 +# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb +# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b +# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh +/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e +CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6 +1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE +FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS +gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X +G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy +YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH +vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4 +t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/ +gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3 +5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w +DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz +Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0 +nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT +RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT +wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2 +t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa +TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 +o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU +3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA +iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f +WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM +S1IK +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust +# Subject: CN=OpenTrust Root CA G3 O=OpenTrust +# Label: "OpenTrust Root CA G3" +# Serial: 1492104908271485653071219941864171170455615 +# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24 +# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6 +# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92 +-----BEGIN CERTIFICATE----- +MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx +CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U +cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow 
+QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl +blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm +3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d +oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G +A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5 +DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK +BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q +j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx +4nxp5V2a+EEfOzmTk51V6s2N8fvB +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc +oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx 
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj +ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- 
+ +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: 
"Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Label: "LuxTrust Global Root 2" +# Serial: 59914338225734147123941058376788110305822489521 +# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c +# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f +# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 +-----BEGIN CERTIFICATE----- +MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL +BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV +BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw +MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B +LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F +ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem +hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 +EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn +Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 +zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ +96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m +j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g +DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ +8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j +X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH +hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB +KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 +Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT ++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL +BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 +BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO +jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 +loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c +qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ +2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ +JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre +zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf +LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ 
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 +oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr +N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- +# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Secure Server CA" +# Serial: 927650371 +# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee +# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 +# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC +VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u +ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc +KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u +ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 +MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE +ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j +b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg +U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ +I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 +wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC +AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb +oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 +BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p +dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk +MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp +b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 +MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi +E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa +MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI +hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN +95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd +2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority +# Label: "ValiCert Class 2 VA" +# Serial: 1 +# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 +# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 +# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy +NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY +dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 +WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS +v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v +UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu +IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC +W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Label: "NetLock Express (Class C) Root" +# Serial: 104 +# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4 +# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b +# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f +-----BEGIN CERTIFICATE----- +MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx +ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 +b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD +EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X +DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw +DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u +c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr +TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN +BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA +OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC +2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW +RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P +AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW +ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0 +YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz +b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO +ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB +IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs +b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs +ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s +YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg +a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g +SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0 
+aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg +YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg +Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY +ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g +pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4 +Fp1hBWeAyNDYpQcCNJgEjTME1A== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok +# Label: "NetLock Business (Class B) Root" +# Serial: 105 +# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6 +# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af +# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12 +-----BEGIN CERTIFICATE----- +MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx +ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0 +b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD +EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05 +OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G +A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh +Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l +dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG +SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK +gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX +iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc +Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E +BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G +SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu +b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh +bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv +Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln +aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0 +IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh +c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph +biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo +ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP +UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj +YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo +dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA +bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06 +sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa +n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS +NitjrFgBazMpUIaD8QFI +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority +# Label: "RSA Root Certificate 1" +# Serial: 1 +# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 +# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb +# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy +NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD +cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs +2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY +JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE +Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ +n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A +PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Label: "ValiCert Class 1 VA" +# Serial: 1 +# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb +# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e +# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy +NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y +LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ +TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y +TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 +LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW +I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw +nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. 
+# Label: "Equifax Secure eBusiness CA 1" +# Serial: 4 +# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d +# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 +# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT +ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw +MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j +LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ +KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo +RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu +WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw +Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK +eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM +zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ +WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN +/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Label: "Equifax Secure Global eBusiness CA" +# Serial: 1 +# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc +# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 +# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT +ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw +MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj +dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l +c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC +UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc +58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ +o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr +aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA +A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA +Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv +8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Premium Server CA" +# Serial: 1 +# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a +# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a +# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy 
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t +MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB +MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG +A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl +cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE +VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ +ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR +uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG +9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI +hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM +pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Server CA" +# Serial: 1 +# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d +# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c +# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm +MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx +MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 +dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl +cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 +DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD +gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 +yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX +L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj +EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG +7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e +QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ +qdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. 
OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 149843929435818692848040365716851702463 +# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67 +# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2 +# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do +lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc +AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 80507572722862485515306429940691309246 +# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4 +# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b +# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i +2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ +2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. 
- For authorized use only/VeriSign Trust Network +# Label: "Verisign Class 3 Public Primary Certification Authority - G2" +# Serial: 167285380242319648451154478808036881606 +# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 +# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f +# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 +pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 +13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk +U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i +F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY +oJ2daZH9 +-----END CERTIFICATE----- + +# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Label: "GTE CyberTrust Global Root" +# Serial: 421 +# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db +# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 +# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 +-----BEGIN CERTIFICATE----- +MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD +VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv +bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv +b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV +UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU +cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds +b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH +iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS +r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 +04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r +GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 +3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P +lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ +-----END CERTIFICATE----- + +# Issuer: C=US, O=Equifax, OU=Equifax Secure Certificate Authority +# Subject: C=US, O=Equifax, OU=Equifax Secure Certificate Authority +# Label: "Equifax Secure Certificate Authority" +# Serial: 903804111 +# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 +# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a +# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV 
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy +dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 +MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx +dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f +BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A +cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ +MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm +aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw +ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj +IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y +7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh +1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 +-----END CERTIFICATE----- diff --git a/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/DESCRIPTION.rst b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..ef4aa7b --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,13 @@ + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. + +Contact +------- + +`Mailing list `_ + + diff --git a/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/INSTALLER b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/METADATA b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/METADATA new file mode 100644 index 0000000..7356013 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/METADATA @@ -0,0 +1,35 @@ +Metadata-Version: 2.0 +Name: cffi +Version: 1.10.0 +Summary: Foreign Function Interface for Python calling C code. +Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. 
+ +Contact +------- + +`Mailing list `_ + + diff --git a/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/RECORD b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/RECORD new file mode 100644 index 0000000..7c14959 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/RECORD @@ -0,0 +1,42 @@ +_cffi_backend.cp36-win32.pyd,sha256=Go5Po3dC2iO2JU1oFvQuisiCbeIH8GyVYoi2_fJGg-Y,128512 +cffi/__init__.py,sha256=QcAAIPcVY5LsX041WHzW-GYObh691LT5L5FKo0yVwDI,479 +cffi/_cffi_include.h,sha256=8SuGAPe_N8n4Uv2B8aQOqp5JFG0un02SKGvy1e9tihQ,10238 +cffi/_embedding.h,sha256=c_xb0Dw0k7gq9OP6x10yrLBy1offSq5KxJphiU4p3dw,17275 +cffi/api.py,sha256=FHlxuRrwmZbheKY2HY3l1ScFHNuWUBjGu80YPhleICs,39647 +cffi/backend_ctypes.py,sha256=CcGNp1XCa7QHBihowgHu5BojpeQZ32s7tpEjFrtFtME,42078 +cffi/cffi_opcode.py,sha256=hbX3E-hmvcwzfqOhqSfH3ObpmRMNOHf_VZ--32flIEo,5459 +cffi/commontypes.py,sha256=QS4uxCDI7JhtTyjh1hlnCA-gynmaszWxJaRRLGkJa1A,2689 +cffi/cparser.py,sha256=AX4kk4BejnA8erNHzTEyeJWbX_He82MBmpIsZlkmdl8,38507 +cffi/error.py,sha256=yNDakwm_HPJ_T53ivbB7hEts2N-oBGjMLw_25aNi2E8,536 +cffi/ffiplatform.py,sha256=g-6tBT6R2aXkIDaAmni92fXI4rwiCegHU8AyD_wL3wo,3645 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=1daM9AYkCFmSMUrgbbe9KIK1jXsJPYZn_A828S-Qbv8,21103 +cffi/parse_c_type.h,sha256=BBca7ODJCzlsK_1c4HT5MFhy1wdUbyHYvKuvCvxaQZ8,5835 +cffi/recompiler.py,sha256=43UEvyl2mtigxa0laEWHLDtv2T8OTZEMj2NXComOoJU,61597 +cffi/setuptools_ext.py,sha256=07n99TzG6QAsFDhf5-cE10pN2FIzKQ-DVFV5YbnN6eA,7463 +cffi/vengine_cpy.py,sha256=Kw_Z38hrBJPUod5R517dRwAvRC7SjQ0qx8fEX1ZaFAM,41325 +cffi/vengine_gen.py,sha256=dLmNdH0MmI_Jog3PlYuCG5RIdx8_xP_RuOjRa84q_N8,26597 +cffi/verifier.py,sha256=Vk8v9fePaHkMmDc-wftJ4gowPErbacfC3soTw_rpT8U,11519 +cffi-1.10.0.dist-info/DESCRIPTION.rst,sha256=9ijQLbcqTWNF-iV0RznFiBeBCNrjArA0P-eutKUPw98,220 +cffi-1.10.0.dist-info/METADATA,sha256=c9-fyjmuNh52W-A4SeBTlgr35GZEsWW2Tskw_3nHCWM,1090 +cffi-1.10.0.dist-info/RECORD,, +cffi-1.10.0.dist-info/WHEEL,sha256=xiHTm3JxoVljPSD6nSGhq3B4VY9iUqMNXwYQ259n1PI,102 +cffi-1.10.0.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76 +cffi-1.10.0.dist-info/metadata.json,sha256=fBsfmNhS5_P6IGaL1mdGMDj8o0NZPotHZeGIB3FprRI,1112 +cffi-1.10.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi-1.10.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cffi/__pycache__/api.cpython-36.pyc,, +cffi/__pycache__/backend_ctypes.cpython-36.pyc,, +cffi/__pycache__/cffi_opcode.cpython-36.pyc,, +cffi/__pycache__/commontypes.cpython-36.pyc,, +cffi/__pycache__/cparser.cpython-36.pyc,, +cffi/__pycache__/error.cpython-36.pyc,, +cffi/__pycache__/ffiplatform.cpython-36.pyc,, +cffi/__pycache__/lock.cpython-36.pyc,, +cffi/__pycache__/model.cpython-36.pyc,, +cffi/__pycache__/recompiler.cpython-36.pyc,, +cffi/__pycache__/setuptools_ext.cpython-36.pyc,, +cffi/__pycache__/vengine_cpy.cpython-36.pyc,, +cffi/__pycache__/vengine_gen.cpython-36.pyc,, +cffi/__pycache__/verifier.cpython-36.pyc,, +cffi/__pycache__/__init__.cpython-36.pyc,, diff --git a/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/WHEEL b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/WHEEL new file mode 100644 index 0000000..7872c33 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp36-cp36m-win32 + diff --git a/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/entry_points.txt 
b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/entry_points.txt new file mode 100644 index 0000000..eee7e0f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules + diff --git a/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/top_level.txt b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/top_level.txt new file mode 100644 index 0000000..f645779 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi-1.10.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/RBXLegacyDiscordBot/lib/cffi/__init__.py b/RBXLegacyDiscordBot/lib/cffi/__init__.py new file mode 100644 index 0000000..e2e70fd --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/__init__.py @@ -0,0 +1,13 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing + +__version__ = "1.10.0" +__version_info__ = (1, 10, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/RBXLegacyDiscordBot/lib/cffi/_cffi_include.h b/RBXLegacyDiscordBot/lib/cffi/_cffi_include.h new file mode 100644 index 0000000..a0e0990 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/_cffi_include.h @@ -0,0 +1,253 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. Py_DEBUG implies them, + but not the other way around. 
+*/ +#ifndef _CFFI_USE_EMBEDDING +# include +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) +# define Py_LIMITED_API +# endif +#endif + +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _CFFI_NUM_EXPORTS 26 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? 
_CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/RBXLegacyDiscordBot/lib/cffi/_embedding.h b/RBXLegacyDiscordBot/lib/cffi/_embedding.h new file mode 100644 index 0000000..1142183 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). 
+ */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.10.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. 
+ */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + (void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. 
+ Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/RBXLegacyDiscordBot/lib/cffi/api.py b/RBXLegacyDiscordBot/lib/cffi/api.py new file mode 100644 index 0000000..502b376 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/api.py @@ -0,0 +1,916 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. 
+ + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in backend.__dict__: + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. 
+ """ + self._cdef(csource, override=override, packed=packed) + + def embedding_api(self, csource, packed=False): + self._cdef(csource, packed=packed, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + assert isinstance(name, basestring) or name is None + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. + """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. 
+ """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. 
+ + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, python_buffer): + """Return a that points to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + """ + return self._backend.from_buffer(self.BCharA, python_buffer) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. + 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. 
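memmove(), unpack() and callback() can all be exercised without writing any C; a minimal sketch (the callback signature is made up for illustration):

    from cffi import FFI

    ffi = FFI()
    dest = bytearray(5)
    ffi.memmove(dest, b"hello", 5)          # any readable buffer -> any writable buffer
    print(dest)                             # bytearray(b'hello')

    a = ffi.new("int[3]", [7, 8, 9])
    print(ffi.unpack(ffi.cast("int *", a), 3))   # [7, 8, 9]

    @ffi.callback("int(int, int)")
    def add(x, y):      # keep a reference to 'add' alive while C code may call it
        return x + y

    print(add(2, 3))    # 5; 'add' is a cdata function pointer usable from C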
+ """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + """ + return self._backend.gcp(cdata, destructor) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. + """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . 
+ If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. 
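addressof() and the handle helpers round-trip struct fields and Python objects; a small sketch (the struct name is invented for illustration):

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("struct point { int x, y; };")
    pt = ffi.new("struct point *", {"x": 1, "y": 2})
    py = ffi.addressof(pt[0], "y")   # int* pointing at the 'y' field
    py[0] = 7
    print(pt.y)                      # 7

    obj = {"payload": 123}
    h = ffi.new_handle(obj)          # opaque void*; keep 'h' alive as long as C holds it
    assert ffi.from_handle(h) is obj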
+ """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
+ if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from distutils.dir_util import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). 
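The API-mode pipeline built from set_source() and compile() is normally run from a small build script; a minimal sketch (the module name and C snippet are invented for illustration, and a C compiler must be available):

    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("int add(int x, int y);")
    ffibuilder.set_source("_example_cffi", """
        int add(int x, int y) { return x + y; }
    """)

    if __name__ == "__main__":
        # Writes _example_cffi.c next to the script and builds the extension module.
        ffibuilder.compile(verbose=True)

Afterwards the compiled module is imported as usual: from _example_cffi import ffi, lib; lib.add(2, 3) returns 5.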
+ + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. + try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if name is None: + if sys.platform != "win32": + return backend.load_library(None, flags) + name = "c" # Windows: load_library(None) fails, but this works + # (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. 
Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + # + if libname is not None: + try: + if not isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + 
FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. + import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/RBXLegacyDiscordBot/lib/cffi/backend_ctypes.py b/RBXLegacyDiscordBot/lib/cffi/backend_ctypes.py new file mode 100644 index 0000000..60344ad --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/backend_ctypes.py @@ -0,0 +1,1114 @@ +import ctypes, ctypes.util, operator, sys +from . import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def _get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = 
(isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' 
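This pure-ctypes backend implements a subset of CFFI without the compiled _cffi_backend extension; it is mainly exercised by passing it explicitly to FFI(), roughly as sketched below (several features in this file raise NotImplementedError under it):

    from cffi.api import FFI
    from cffi.backend_ctypes import CTypesBackend

    ffi = FFI(backend=CTypesBackend())   # bypass _cffi_backend, go through ctypes only
    ffi.cdef("size_t strlen(const char *s);")
    libc = ffi.dlopen(None)              # Unix-like systems
    print(libc.strlen(b"hi"))            # 2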
+ + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def 
new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return self._value + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if not 
isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if BItem 
is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', 
name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for 
bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object (got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return 
CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset = BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, 
CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/RBXLegacyDiscordBot/lib/cffi/cffi_opcode.py b/RBXLegacyDiscordBot/lib/cffi/cffi_opcode.py new file mode 100644 index 0000000..0cf76c9 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/cffi_opcode.py @@ -0,0 +1,179 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 + +_NUM_PRIM = 48 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/RBXLegacyDiscordBot/lib/cffi/commontypes.py b/RBXLegacyDiscordBot/lib/cffi/commontypes.py new file mode 100644 index 0000000..8ec97c7 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/commontypes.py @@ -0,0 +1,80 @@ +import sys +from . 
import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/RBXLegacyDiscordBot/lib/cffi/cparser.py b/RBXLegacyDiscordBot/lib/cffi/cparser.py new file mode 100644 index 0000000..0c8ef3f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/cparser.py @@ -0,0 +1,876 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . 
import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" + r"\.\.\.") +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. 
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _preprocess(csource): + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literal! + csource = _r_comment.sub(' ', csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. + csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. 
+ matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + return csource.replace('...', ' __dotdotdot__ '), macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = ['typedef int %s;' % typename for typename in typenames] + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + csourcelines.append(csource) + csource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... 
+ try: + ast = _get_parser().parse(csource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) and try to interpret + # it as a line number + line = None + msg = str(e) + if msg.startswith(':') and ':' in msg[1:]: + linenum = msg[1:msg.find(':',1)] + if linenum.isdigit(): + linenum = int(linenum, 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, dllexport=False): + prev_options = self._options + try: + self._options = {'override': override, + 'packed': packed, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + pass # skip pragma, only in pycparser 2.15 + else: + raise CDefError("unrecognized construct", decl) + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def 
_process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert 
'__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. 
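A rough illustration of the __stdcall trick described in the comment above (a sketch only, reusing the _r_stdcall1/_r_stdcall2 regexes defined near the top of this file; the sample declaration is hypothetical, not part of the patch):

    src = 'int __stdcall f(int);'          # hypothetical cdef() input on Windows
    src = _r_stdcall2.sub(' volatile volatile const(', src)  # "(__stdcall" form
    src = _r_stdcall1.sub(' volatile volatile const ', src)  # bare "__stdcall"
    # src is now roughly 'int  volatile volatile const  f(int);', so pycparser
    # reports the return type with qualifiers ending in
    # ['volatile', 'volatile', 'const'], which the check below maps to
    # abi = '__stdcall'.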
+ abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. 
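The behaviour described in the comment above is visible through the public ffi.cdef() API: an opaque declaration leaves the struct incomplete, and a later declaration with a field list completes the same type. A minimal sketch (the names foo and mk are hypothetical):

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("struct foo; struct foo *mk(void);")  # no field list: stays opaque
    ffi.cdef("struct foo { int x; };")             # same type, fields filled in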
+ if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if s.startswith('0'): + if s.startswith('0x') or s.startswith('0X'): + return int(s, 16) + return int(s, 8) + elif '1' <= s[0] <= '9': + return int(s, 10) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and + exprnode.op == '+'): + return (self._parse_constant(exprnode.left) + + self._parse_constant(exprnode.right)) + # + if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and + exprnode.op == '-'): + return (self._parse_constant(exprnode.left) - + self._parse_constant(exprnode.right)) + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/RBXLegacyDiscordBot/lib/cffi/error.py b/RBXLegacyDiscordBot/lib/cffi/error.py new file mode 100644 index 0000000..75a63d9 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/error.py @@ -0,0 +1,20 @@ + +class FFIError(Exception): + pass + +class CDefError(Exception): + def __str__(self): + try: + line = 'line %d: ' % (self.args[1].coord.line,) + except (AttributeError, TypeError, IndexError): + line = '' + return '%s%s' % (line, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ diff --git a/RBXLegacyDiscordBot/lib/cffi/ffiplatform.py b/RBXLegacyDiscordBot/lib/cffi/ffiplatform.py new file mode 100644 index 0000000..27cdde0 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/ffiplatform.py @@ -0,0 +1,115 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + from distutils.core import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from distutils.core import Distribution + import distutils.errors, distutils.log + # + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = distutils.log.set_threshold(0) or 0 + try: + distutils.log.set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + distutils.log.set_threshold(old_level) + except (distutils.errors.CompileError, + distutils.errors.LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + 
int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/RBXLegacyDiscordBot/lib/cffi/lock.py b/RBXLegacyDiscordBot/lib/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/RBXLegacyDiscordBot/lib/cffi/model.py b/RBXLegacyDiscordBot/lib/cffi/model.py new file mode 100644 index 0000000..41bab0a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/model.py @@ -0,0 +1,601 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + pass + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise 
NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = qualify(quals, " *&") + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def resolve_length(self, 
newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length == '...': + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = False + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def has_anonymous_struct_fields(self): + if self.fldtypes is None: + return False + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + return True + return False + + def enumfields(self): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if name == '' and isinstance(type, StructOrUnion): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
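For illustration (a sketch, not part of the patch), consider a hypothetical C declaration with a nested anonymous struct:

    # struct outer { struct { int a; int b; }; int c; };
    #
    # enumfields() yields the fields 'a', 'b' and 'c' in order (the anonymous
    # member has the empty name and is recursed into), and force_flatten()
    # then rewrites fldnames/fldtypes so that the outer struct lists
    # 'a', 'b', 'c' directly.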
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + sflags = 0 + if self.packed: + sflags = 8 # SF_PACKED + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, sflags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length == '...': + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" % ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not 
self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. + __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._backend.__typecache[key] + except KeyError: + pass + except AttributeError: + # initialize the __typecache attribute, either at the module level + # if ffi._backend is a module, or at the class level if ffi._backend + # is some instance. 
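A standalone sketch of the caching pattern used by global_cache() (assumed helper names; not the patched code itself). As the note further below explains, setdefault() on a WeakValueDictionary is avoided, so the check-and-store happens under a lock:

    import threading, weakref

    _cache = weakref.WeakValueDictionary()
    _lock = threading.Lock()

    def get_or_create(key, factory):
        # factory() must return a weak-referenceable object; the caller's
        # reference keeps it alive while it sits in the WeakValueDictionary.
        with _lock:
            obj = _cache.get(key)
            if obj is None:
                obj = factory()
                _cache[key] = obj
            return obj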
+ if isinstance(ffi._backend, types.ModuleType): + ffi._backend.__typecache = weakref.WeakValueDictionary() + else: + type(ffi._backend).__typecache = weakref.WeakValueDictionary() + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._backend.__typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/RBXLegacyDiscordBot/lib/cffi/parse_c_type.h b/RBXLegacyDiscordBot/lib/cffi/parse_c_type.h new file mode 100644 index 0000000..a01d89e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/parse_c_type.h @@ -0,0 +1,177 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). */ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. 
a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 + +#define _CFFI__NUM_PRIM 48 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const 
struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/RBXLegacyDiscordBot/lib/cffi/recompiler.py b/RBXLegacyDiscordBot/lib/cffi/recompiler.py new file mode 100644 index 0000000..524d6be --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/recompiler.py @@ -0,0 +1,1524 @@ +import os, sys, io +from . import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION = "0x2601" +VERSION_EMBEDDED = "0x2701" + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + 
self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in 
tp.enumfields(): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('#define _CFFI_PYTHON_STARTUP_CODE %s' % + (self._string_literal(self.ffi._embedding),)) + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + prnt(''.join(lines)) + version = VERSION_EMBEDDED + else: + version = VERSION + # + # then paste the C source given by the user, verbatim. 
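For context: the "preamble" pasted verbatim at this point is whatever string the user handed to ffi.set_source(). A minimal sketch of the API-mode build script that ultimately drives this write_c_source_to_f() path; the module name "_example" and the tiny inline C are illustrative placeholders, not part of this patch:

    # build_example.py -- hypothetical API-mode build script
    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("int add(int a, int b);")      # declarations that populate _cffi_types
    ffibuilder.set_source("_example", """
        static int add(int a, int b) { return a + b; }
    """)                                           # this string becomes the preamble above

    if __name__ == "__main__":
        ffibuilder.compile(verbose=True)           # ends up calling recompile()/make_c_source()
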
+ prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if self._num_externpy: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)%s;' % version) + prnt(' p[1] = &_cffi_type_context;') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + 
prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", %s, &_cffi_type_context);' % ( + self.module_name, version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", %s, &_cffi_type_context);' % ( + self.module_name, version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = %s," % (VERSION,)) + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, model.StructOrUnionOrEnum): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = (%s)alloca((size_t)datasize);' % ( + tovar, tp.get_c_name(''))) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type(): + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return 
self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', 
'.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if isinstance(tp_result, model.StructOrUnion): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, 
but complain on float or double + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) + while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or tp.has_anonymous_struct_fields(): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(tp.enumfields()) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not 
declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
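The struct handling just above decides which _CFFI_F_* flags each struct gets: a complete declaration is cross-checked against the real layout, a declaration ending in "..." takes its layout silently from the C compiler, and a body-less declaration stays opaque. A hedged illustration of the corresponding cdef() inputs (the struct names are hypothetical):

    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("""
        struct fully_declared { int x; int y; };  /* checked field-by-field (_CFFI_F_CHECK_FIELDS) */
        struct partial_layout { int x; ...; };    /* layout obtained from the C compiler */
        struct opaque_handle;                     /* never expanded: _CFFI_F_OPAQUE */
    """)
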
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s };' % (self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _string_literal(self, s): + def _char_repr(c): + # escape with a '\' the characters '\', '"' or (for trigraphs) '?' + if c in '\\"?': return '\\' + c + if ' ' <= c < '\x7F': return c + if c == '\n': return '\\n' + return '\\%03o' % ord(c) + lines = [] + for line in s.splitlines(True) or ['']: + lines.append('"%s"' % ''.join([_char_repr(c) for c in line])) + return ' \\\n'.join(lines) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + 
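The _extern_python_decl() trampolines generated earlier in this file back cffi's extern "Python" callbacks: C code calls the static stub, which packs its arguments into a small buffer and forwards them through _cffi_call_python. A sketch of how that feature is typically used, assuming a hypothetical module name "_example" and callback:

    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("""
        extern "Python" int on_event(int code);   /* implemented in Python, callable from C */
    """)
    ffibuilder.set_source("_example", "")         # API mode is required for extern "Python"

    # in the application, once "_example" has been built:
    # from _example import ffi, lib
    # @ffi.def_extern()
    # def on_event(code):
    #     return code * 2
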
_emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
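Besides the C path, _make_c_or_py_source()/make_py_source() above also cover out-of-line ABI mode, where no C compiler is involved and a pure-Python module is emitted instead. A hedged sketch of that workflow (module and library names are placeholders and platform-dependent):

    # build_abi_example.py -- hypothetical out-of-line ABI-mode build script
    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("int printf(const char *format, ...);")
    ffibuilder.set_source("_abi_example", None)   # None => make_py_source(), no C compiler
    ffibuilder.compile()

    # later, at run time:
    # from _abi_example import ffi
    # lib = ffi.dlopen(None)       # or an explicit library name; POSIX-specific here
    # lib.printf(b"hello from ABI mode\n")
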
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + +def _verify(ffi, module_name, preamble, *args, **kwds): + # FOR TESTS ONLY + from testing.udir import udir + import imp + assert module_name not in sys.modules, "module name conflict: %r" % ( + module_name,) + kwds.setdefault('tmpdir', str(udir)) + outputfilename = recompile(ffi, module_name, preamble, *args, **kwds) + module = imp.load_dynamic(module_name, outputfilename) + # + # hack hack hack: copy all *bound methods* from module.ffi back to the + # ffi instance. Then calls like ffi.new() will invoke module.ffi.new(). 
+ for name in dir(module.ffi): + if not name.startswith('_'): + attr = getattr(module.ffi, name) + if attr is not getattr(ffi, name, object()): + setattr(ffi, name, attr) + def typeof_disabled(*args, **kwds): + raise NotImplementedError + ffi._typeof = typeof_disabled + for name in dir(ffi): + if not name.startswith('_') and not hasattr(module.ffi, name): + setattr(ffi, name, NotImplemented) + return module.lib diff --git a/RBXLegacyDiscordBot/lib/cffi/setuptools_ext.py b/RBXLegacyDiscordBot/lib/cffi/setuptools_ext.py new file mode 100644 index 0000000..5b0f296 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/setuptools_ext.py @@ -0,0 +1,188 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). 
(http://bugs.python.org/issue28401) + """ + if 'py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount'): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from distutils.dir_util import mkpath + from distutils import log + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. 
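The setuptools glue above is reached through the cffi_modules setup() keyword (see cffi_modules() later in this file). A minimal sketch of the corresponding setup.py, with package and path names as placeholders:

    from setuptools import setup

    setup(
        name="example-pkg",
        setup_requires=["cffi>=1.0.0"],
        cffi_modules=["builder/build_example.py:ffibuilder"],
        install_requires=["cffi>=1.0.0"],
    )
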
+ + +def _add_py_module(dist, ffi, module_name): + from distutils.dir_util import mkpath + from distutils.command.build_py import build_py + from distutils.command.build_ext import build_ext + from distutils import log + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + dist.cmdclass['build_py'] = build_py_make_mod + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/RBXLegacyDiscordBot/lib/cffi/vengine_cpy.py b/RBXLegacyDiscordBot/lib/cffi/vengine_cpy.py new file mode 100644 index 0000000..af8775f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/vengine_cpy.py @@ -0,0 +1,1011 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, imp +from . import model +from .error import VerificationError + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. 
+ # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. + modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! + # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. 
+ revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. + self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, 
localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = alloca((size_t)datasize);' % (tovar,)) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type(): + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of 
%s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = (tp.length == '...')) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length == '...': + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include +#include + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ + typedef unsigned char _Bool; +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) +# include +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + 
+/**********/ +''' diff --git a/RBXLegacyDiscordBot/lib/cffi/vengine_gen.py b/RBXLegacyDiscordBot/lib/cffi/vengine_gen.py new file mode 100644 index 0000000..53cab7c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/vengine_gen.py @@ -0,0 +1,672 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. 
+ self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = 
model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length == '...': + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length == '...': + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include +#include +#include +#include +#include /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ + typedef unsigned char _Bool; +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) +# include +# endif +#endif +''' diff --git a/RBXLegacyDiscordBot/lib/cffi/verifier.py b/RBXLegacyDiscordBot/lib/cffi/verifier.py new file mode 100644 index 0000000..22eb6dc --- /dev/null +++ b/RBXLegacyDiscordBot/lib/cffi/verifier.py @@ -0,0 +1,317 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . 
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join([sys.version[:3], __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + _hack_at_distutils() # backward compatibility hack + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + try: + os.makedirs(os.path.dirname(filename)) + except OSError: + pass diff --git a/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/DESCRIPTION.rst b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..c0f044d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,70 @@ +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. 
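+
+For use as a library (an illustrative sketch added here for the reader, not
+part of the upstream description; the file name is hypothetical), the same
+detection is available through ``chardet.detect()`` as defined in
+``chardet/__init__.py`` below — it takes a ``bytes`` object and returns a
+dict with the guessed encoding and a confidence value::
+
+    import chardet
+
+    with open('somefile', 'rb') as fp:
+        result = chardet.detect(fp.read())
+    # e.g. {'encoding': 'windows-1252', 'confidence': 0.5, 'language': ''}
+    print(result['encoding'], result['confidence'])
+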
+ +:maintainer: Dan Blanchard + + diff --git a/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/INSTALLER b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/METADATA b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/METADATA new file mode 100644 index 0000000..1427867 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/METADATA @@ -0,0 +1,96 @@ +Metadata-Version: 2.0 +Name: chardet +Version: 3.0.4 +Summary: Universal encoding detector for Python 2 and 3 +Home-page: https://github.com/chardet/chardet +Author: Daniel Blanchard +Author-email: dan.blanchard@gmail.com +License: LGPL +Keywords: encoding,i18n,xml +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Linguistic + +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. 
Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. + +:maintainer: Dan Blanchard + + diff --git a/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/RECORD b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/RECORD new file mode 100644 index 0000000..97ddf89 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/RECORD @@ -0,0 +1,91 @@ +chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 +chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 +chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 +chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 +chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 +chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 +chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 +chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 +chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 +chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 +chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 +chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 +chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 
+chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 +chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +chardet/cli/chardetect.py,sha256=YBO8L4mXo0WR6_-Fjh_8QxPBoEBNqB9oNxNrdc54AQs,2738 +chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174 +chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239 +chardet-3.0.4.dist-info/RECORD,, +chardet-3.0.4.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +chardet-3.0.4.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60 +chardet-3.0.4.dist-info/metadata.json,sha256=0htbRM18ujyGZDdfowgAqj6Hq2eQtwzwyhaEveKntgo,1375 +chardet-3.0.4.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 +../../bin/chardetect.exe,sha256=0Hy35yJ12rn5R6DMWJTa7q6VyPOYQVgn-Lv5VTLs48w,89485 +chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +chardet/cli/__pycache__/chardetect.cpython-36.pyc,, +chardet/cli/__pycache__/__init__.cpython-36.pyc,, +chardet/__pycache__/big5freq.cpython-36.pyc,, +chardet/__pycache__/big5prober.cpython-36.pyc,, +chardet/__pycache__/chardistribution.cpython-36.pyc,, +chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, +chardet/__pycache__/charsetprober.cpython-36.pyc,, +chardet/__pycache__/codingstatemachine.cpython-36.pyc,, +chardet/__pycache__/compat.cpython-36.pyc,, +chardet/__pycache__/cp949prober.cpython-36.pyc,, +chardet/__pycache__/enums.cpython-36.pyc,, +chardet/__pycache__/escprober.cpython-36.pyc,, +chardet/__pycache__/escsm.cpython-36.pyc,, +chardet/__pycache__/eucjpprober.cpython-36.pyc,, +chardet/__pycache__/euckrfreq.cpython-36.pyc,, +chardet/__pycache__/euckrprober.cpython-36.pyc,, +chardet/__pycache__/euctwfreq.cpython-36.pyc,, +chardet/__pycache__/euctwprober.cpython-36.pyc,, +chardet/__pycache__/gb2312freq.cpython-36.pyc,, +chardet/__pycache__/gb2312prober.cpython-36.pyc,, +chardet/__pycache__/hebrewprober.cpython-36.pyc,, +chardet/__pycache__/jisfreq.cpython-36.pyc,, +chardet/__pycache__/jpcntx.cpython-36.pyc,, +chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, +chardet/__pycache__/langcyrillicmodel.cpython-36.pyc,, +chardet/__pycache__/langgreekmodel.cpython-36.pyc,, +chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, +chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, +chardet/__pycache__/langthaimodel.cpython-36.pyc,, +chardet/__pycache__/langturkishmodel.cpython-36.pyc,, +chardet/__pycache__/latin1prober.cpython-36.pyc,, +chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/mbcssm.cpython-36.pyc,, +chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/sjisprober.cpython-36.pyc,, +chardet/__pycache__/universaldetector.cpython-36.pyc,, +chardet/__pycache__/utf8prober.cpython-36.pyc,, +chardet/__pycache__/version.cpython-36.pyc,, +chardet/__pycache__/__init__.cpython-36.pyc,, diff --git a/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/WHEEL b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git 
a/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/entry_points.txt b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/entry_points.txt new file mode 100644 index 0000000..a884269 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +chardetect = chardet.cli.chardetect:main + diff --git a/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/top_level.txt b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/top_level.txt new file mode 100644 index 0000000..79236f2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet-3.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +chardet diff --git a/RBXLegacyDiscordBot/lib/chardet/__init__.py b/RBXLegacyDiscordBot/lib/chardet/__init__.py new file mode 100644 index 0000000..0f9f820 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/__init__.py @@ -0,0 +1,39 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +from .compat import PY2, PY3 +from .universaldetector import UniversalDetector +from .version import __version__, VERSION + + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{0}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() diff --git a/RBXLegacyDiscordBot/lib/chardet/big5freq.py b/RBXLegacyDiscordBot/lib/chardet/big5freq.py new file mode 100644 index 0000000..38f3251 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 
768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 
882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 
452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 
498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/RBXLegacyDiscordBot/lib/chardet/big5prober.py b/RBXLegacyDiscordBot/lib/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/RBXLegacyDiscordBot/lib/chardet/chardistribution.py b/RBXLegacyDiscordBot/lib/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
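+        # (Editor's note, not upstream) Worked example of how this ratio feeds
+        # get_confidence() below: with Big5's ratio of 0.75, a sample of 1000
+        # analysed characters of which 900 are "frequent" (frequency order
+        # below 512) gives r = 900 / ((1000 - 900) * 0.75) = 12, which is
+        # capped at SURE_YES (0.99); only 300 frequent characters gives
+        # r = 300 / (700 * 0.75), roughly 0.57.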
+ self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. + return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/RBXLegacyDiscordBot/lib/chardet/charsetgroupprober.py b/RBXLegacyDiscordBot/lib/chardet/charsetgroupprober.py new file mode 100644 index 0000000..8b3738e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/RBXLegacyDiscordBot/lib/chardet/charsetprober.py b/RBXLegacyDiscordBot/lib/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. 
+ """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... + if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/RBXLegacyDiscordBot/lib/chardet/cli/__init__.py b/RBXLegacyDiscordBot/lib/chardet/cli/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/cli/__init__.py @@ -0,0 +1 @@ + diff --git a/RBXLegacyDiscordBot/lib/chardet/cli/chardetect.py b/RBXLegacyDiscordBot/lib/chardet/cli/chardetect.py new file mode 100644 index 0000000..f0a4cc5 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/cli/chardetect.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from chardet import __version__ +from chardet.compat import PY2 +from chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. + if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + result['confidence']) + else: + return '{0}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. \ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {0}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. 
Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/RBXLegacyDiscordBot/lib/chardet/codingstatemachine.py b/RBXLegacyDiscordBot/lib/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging + +from .enums import MachineState + + +class CodingStateMachine(object): + """ + A state machine to verify a byte sequence for a particular encoding. For + each byte the detector receives, it will feed that byte to every active + state machine available, one byte at a time. The state machine changes its + state based on its previous state and the byte it receives. There are 3 + states in a state machine that are of interest to an auto-detector: + + START state: This is the state to start with, or a legal byte sequence + (i.e. a valid code point) for character has been identified. + + ME state: This indicates that the state machine identified a byte sequence + that is specific to the charset it is designed for and that + there is no other possible encoding which can contain this byte + sequence. This will to lead to an immediate positive answer for + the detector. + + ERROR state: This indicates the state machine identified an illegal byte + sequence for that encoding. This will lead to an immediate + negative answer for this encoding. Detector will exclude this + encoding from consideration from here on. 
+ """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/RBXLegacyDiscordBot/lib/chardet/compat.py b/RBXLegacyDiscordBot/lib/chardet/compat.py new file mode 100644 index 0000000..ddd7468 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/compat.py @@ -0,0 +1,34 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + base_str = (str, unicode) + text_type = unicode +else: + PY2 = False + PY3 = True + base_str = (bytes, str) + text_type = str diff --git a/RBXLegacyDiscordBot/lib/chardet/cp949prober.py b/RBXLegacyDiscordBot/lib/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/RBXLegacyDiscordBot/lib/chardet/enums.py b/RBXLegacyDiscordBot/lib/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. + """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/RBXLegacyDiscordBot/lib/chardet/escprober.py b/RBXLegacyDiscordBot/lib/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. + """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/RBXLegacyDiscordBot/lib/chardet/escsm.py b/RBXLegacyDiscordBot/lib/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f 
+2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 
5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/RBXLegacyDiscordBot/lib/chardet/eucjpprober.py b/RBXLegacyDiscordBot/lib/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/RBXLegacyDiscordBot/lib/chardet/euckrfreq.py b/RBXLegacyDiscordBot/lib/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 
908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 
+2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 
608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/RBXLegacyDiscordBot/lib/chardet/euckrprober.py b/RBXLegacyDiscordBot/lib/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/RBXLegacyDiscordBot/lib/chardet/euctwfreq.py b/RBXLegacyDiscordBot/lib/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/RBXLegacyDiscordBot/lib/chardet/euctwprober.py b/RBXLegacyDiscordBot/lib/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/RBXLegacyDiscordBot/lib/chardet/gb2312freq.py b/RBXLegacyDiscordBot/lib/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/RBXLegacyDiscordBot/lib/chardet/gb2312prober.py b/RBXLegacyDiscordBot/lib/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/RBXLegacyDiscordBot/lib/chardet/hebrewprober.py b/RBXLegacyDiscordBot/lib/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). +# +# The dominant difference between these charsets is their directionality. 
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like so:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc' "
+# Adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual" and from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+#    backwards while line order is natural. For charset recognition purposes
+#    the line order is unimportant (In fact, for this implementation, even
+#    word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+#    specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+#    that contains special punctuation marks or diacritics is displayed with
+#    some unconverted characters showing as question marks. This problem might
+#    be corrected using another model prober for x-mac-hebrew. Due to the fact
+#    that x-mac-hebrew texts are so rare, writing another model prober isn't
+#    worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, inquired and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision about which
+# one it is is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber does not use any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew.
Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. 
This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. 
+        if (self._logical_prober.state == ProbingState.NOT_ME) and \
+           (self._visual_prober.state == ProbingState.NOT_ME):
+            return ProbingState.NOT_ME
+        return ProbingState.DETECTING
diff --git a/RBXLegacyDiscordBot/lib/chardet/jisfreq.py b/RBXLegacyDiscordBot/lib/chardet/jisfreq.py
new file mode 100644
index 0000000..83fc082
--- /dev/null
+++ b/RBXLegacyDiscordBot/lib/chardet/jisfreq.py
@@ -0,0 +1,325 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampling from about 20M text materials, including literature and computer technology
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP
+# Characters are sorted by frequency order.
+ +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/RBXLegacyDiscordBot/lib/chardet/jpcntx.py b/RBXLegacyDiscordBot/lib/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code 
is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), +(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
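+        # For example: if the previous buffer ended with only the first byte
+        # of a two-byte character, get_order() reported char_len = 2, the
+        # index overshot num_bytes by one, and _need_to_skip_char_num was set
+        # to 1, so this call starts scanning at offset 1.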
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/RBXLegacyDiscordBot/lib/chardet/langbulgarianmodel.py b/RBXLegacyDiscordBot/lib/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..2aa4fb2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/langbulgarianmodel.py @@ -0,0 +1,228 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +# this table is modified base on win1251BulgarianCharToOrderMap, so +# only number <64 is sure valid + +Latin5_BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 +210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 + 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 + 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 +) + +win1251BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 +221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 + 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 + 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 96.9392% +# first 1024 sequences:3.0618% +# rest sequences: 0.2992% +# negative sequences: 0.0020% +BulgarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, +3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, +0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, +0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, +0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, +0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, +0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, +2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, +3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, +1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, +3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, +1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, +2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, +2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, +3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, +1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, +2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, +2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, +1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, +2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, +2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, +2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, +1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, +2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, +1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, +3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, +1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, +3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, +1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, +2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, +1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, +2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, +1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, +2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, +1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, +2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, +1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, +0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, +1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, +1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, +1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, +0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, +1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, +1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +) + +Latin5BulgarianModel = { + 'char_to_order_map': Latin5_BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Bulgairan', +} + +Win1251BulgarianModel = { + 'char_to_order_map': win1251BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Bulgarian', +} diff --git a/RBXLegacyDiscordBot/lib/chardet/langcyrillicmodel.py b/RBXLegacyDiscordBot/lib/chardet/langcyrillicmodel.py new file mode 100644 index 0000000..e5f9a1f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/langcyrillicmodel.py @@ -0,0 +1,333 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# KOI8-R language model +# Character Mapping Table: +KOI8R_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 +223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 +238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 + 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 + 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 + 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 + 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 +) + +win1251_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +) + +latin5_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +macCyrillic_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, +) + +IBM855_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, +206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, + 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, +220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, +230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, + 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, + 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, +250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, +) + +IBM866_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 97.6601% +# first 1024 sequences: 2.3389% +# rest sequences: 0.1237% +# negative sequences: 0.0009% +RussianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, +1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, +1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, +2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, +1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, +3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, +1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, +2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, +1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, +1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, +1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, +1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, +3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, +1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, +2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, +1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, +2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, +1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, +1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, +1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, +3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, +3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, +1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, +1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, +0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, +1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, +1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, +0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, +1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, +2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, +1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, +1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, +2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, +1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
+0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, +1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, +1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, +0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, +0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, +0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, +2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, +0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +) + +Koi8rModel = { + 'char_to_order_map': KOI8R_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "KOI8-R", + 'language': 'Russian', +} + +Win1251CyrillicModel = { + 'char_to_order_map': win1251_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Russian', +} + +Latin5CyrillicModel = { + 'char_to_order_map': latin5_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Russian', +} + +MacCyrillicModel = { + 'char_to_order_map': macCyrillic_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "MacCyrillic", + 'language': 'Russian', +} + +Ibm866Model = { + 'char_to_order_map': IBM866_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM866", + 'language': 'Russian', +} + +Ibm855Model = { + 'char_to_order_map': IBM855_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM855", + 'language': 'Russian', +} diff --git a/RBXLegacyDiscordBot/lib/chardet/langgreekmodel.py b/RBXLegacyDiscordBot/lib/chardet/langgreekmodel.py new file mode 100644 index 0000000..5332221 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/langgreekmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin7_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +win1253_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 
24, 64, 75, 19, 26, 27,253, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.2851% +# first 1024 sequences:1.7001% +# rest sequences: 0.0359% +# negative sequences: 0.0148% +GreekLangModel = ( +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, +2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, +2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, +2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, +0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, +3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, +2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, +0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, +0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, +0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, +0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, +0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, +0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, +0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, +0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, +0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, +0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, +0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, +0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, +0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, +0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
+0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, +0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, +0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, +0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, +0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin7GreekModel = { + 'char_to_order_map': Latin7_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-7", + 'language': 'Greek', +} + +Win1253GreekModel = { + 'char_to_order_map': win1253_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "windows-1253", + 'language': 'Greek', +} diff --git a/RBXLegacyDiscordBot/lib/chardet/langhebrewmodel.py b/RBXLegacyDiscordBot/lib/chardet/langhebrewmodel.py new file mode 100644 index 0000000..58f4c87 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/langhebrewmodel.py @@ -0,0 +1,200 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Simon Montagu +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Shoshannah Forbes - original C code (?) +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Windows-1255 language model +# Character Mapping Table: +WIN1255_CHAR_TO_ORDER_MAP = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 + 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 +253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 + 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 +124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, +215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, + 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, +106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, + 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, +238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, + 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, + 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.4004% +# first 1024 sequences: 1.5981% +# rest sequences: 0.087% +# negative sequences: 0.0015% +HEBREW_LANG_MODEL = ( +0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, +3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, +1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, +1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, +1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, +1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, +0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, +0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, +0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, +0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, +0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, +0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, +0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, +0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, +0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, +0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, +0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, +0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, +0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, +1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, +1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, +2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, +0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, +0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, +) + +Win1255HebrewModel = { + 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, + 'precedence_matrix': HEBREW_LANG_MODEL, + 'typical_positive_ratio': 0.984004, + 'keep_english_letter': False, + 'charset_name': "windows-1255", + 'language': 'Hebrew', +} diff --git a/RBXLegacyDiscordBot/lib/chardet/langhungarianmodel.py 
b/RBXLegacyDiscordBot/lib/chardet/langhungarianmodel.py new file mode 100644 index 0000000..bb7c095 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/langhungarianmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin2_HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, +175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, + 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, + 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, +245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +win1250HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, +177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, + 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 83,222, 
80,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, + 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, +245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 94.7368% +# first 1024 sequences:5.2623% +# rest sequences: 0.8894% +# negative sequences: 0.0009% +HungarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, +3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, +0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, +1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, +1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, +3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, +2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, +2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, +2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, +2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, +1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, +1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, +3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, +1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, +1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, +2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, +2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, +2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, +3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, +1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, +1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, +1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, +2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, +1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, +2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, +2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, 
+1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, +1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, +1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, +0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, +2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, +2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, +1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, +1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, +2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, +2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, +2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, +1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, +0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +) + +Latin2HungarianModel = { + 'char_to_order_map': Latin2_HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-2", + 'language': 'Hungarian', +} + +Win1250HungarianModel = { + 'char_to_order_map': win1250HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "windows-1250", + 'language': 'Hungarian', +} diff --git a/RBXLegacyDiscordBot/lib/chardet/langthaimodel.py b/RBXLegacyDiscordBot/lib/chardet/langthaimodel.py new file mode 100644 index 0000000..15f94c2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/langthaimodel.py @@ -0,0 +1,199 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# The following result for thai was collected from a limited sample (1M). + +# Character Mapping Table: +TIS620CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 +188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 +253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 + 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 +209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, +223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, +236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, + 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, + 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, + 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, + 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, + 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 92.6386% +# first 1024 sequences:7.3177% +# rest sequences: 1.0230% +# negative sequences: 0.0436% +ThaiLangModel = ( +0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, +0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, +3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, +0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, +3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, +3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, +3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, +3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, +2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, +3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, +1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, +3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, +1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, +0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, +0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, 
+2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, +0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, +3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, +2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, +3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, +2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, +3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, +3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, +3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, +3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, +1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, +0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, +0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, +3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, +3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, +1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, +3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, +3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, +0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, +0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, +1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, +1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, +3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, +0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, +3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, +0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, +0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, +0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, 
+0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, +0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, +0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, +0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, +0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, +3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, +2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, +0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, +3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, +1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, +1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +TIS620ThaiModel = { + 'char_to_order_map': TIS620CharToOrderMap, + 'precedence_matrix': ThaiLangModel, + 'typical_positive_ratio': 0.926386, + 'keep_english_letter': False, + 'charset_name': "TIS-620", + 'language': 'Thai', +} diff --git 
a/RBXLegacyDiscordBot/lib/chardet/langturkishmodel.py b/RBXLegacyDiscordBot/lib/chardet/langturkishmodel.py new file mode 100644 index 0000000..a427a45 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/langturkishmodel.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Özgür Baskın - Turkish Language Model +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin5_TurkishCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, + 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, +255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, + 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, +180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, +164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, +150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, + 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, +124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, + 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, + 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, + 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, +) + +TurkishLangModel = ( +3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, +3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, +3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, +3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, +3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, +3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, +2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, +3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, +1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, 
+2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, +3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, +3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, +2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, +2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, +3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, +0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, +3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, +3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, +0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, +1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, +3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, +1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, +3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, +0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, +3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, +1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, +1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, +2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, +2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, +3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, +1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, +0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, +3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, +0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, 
+3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, +1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, +2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, +0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, +3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, +0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, +0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, +3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, +0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, +0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, +3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, +0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, +3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, +0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, +0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, +3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, +0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, +3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, +0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, +0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, +0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, +0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, +0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, +0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, +1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, +0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, +0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, +3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, +0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, +2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, +2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, +0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, +0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin5TurkishModel = { + 'char_to_order_map': Latin5_TurkishCharToOrderMap, + 'precedence_matrix': TurkishLangModel, + 'typical_positive_ratio': 0.970290, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-9", + 'language': 'Turkish', +} diff --git a/RBXLegacyDiscordBot/lib/chardet/latin1prober.py b/RBXLegacyDiscordBot/lib/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == 
ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence = confidence * 0.73 + return confidence diff --git a/RBXLegacyDiscordBot/lib/chardet/mbcharsetprober.py b/RBXLegacyDiscordBot/lib/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def 
get_confidence(self): + return self.distribution_analyzer.get_confidence() diff --git a/RBXLegacyDiscordBot/lib/chardet/mbcsgroupprober.py b/RBXLegacyDiscordBot/lib/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/RBXLegacyDiscordBot/lib/chardet/mbcssm.py b/RBXLegacyDiscordBot/lib/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # 
a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 
6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 
0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/RBXLegacyDiscordBot/lib/chardet/sbcharsetprober.py b/RBXLegacyDiscordBot/lib/chardet/sbcharsetprober.py new file mode 100644 index 0000000..0adb51d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/sbcharsetprober.py @@ -0,0 +1,132 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model['charset_name'] + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.get('language') + + def feed(self, byte_str): + if not self._model['keep_english_letter']: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model['char_to_order_map'] + for i, c in enumerate(byte_str): + # XXX: Order is in range 1-64, so one would think we want 0-63 here, + # but that leads to 27 more test failures than before. + order = char_to_order_map[c] + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. 
+ if order < CharacterCategory.CONTROL: + self._total_char += 1 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + i = (self._last_order * self.SAMPLE_SIZE) + order + model = self._model['precedence_matrix'][i] + else: # reverse the order of the letters in the lookup + i = (order * self.SAMPLE_SIZE) + self._last_order + model = self._model['precedence_matrix'][i] + self._seq_counters[model] += 1 + self._last_order = order + + charset_name = self._model['charset_name'] + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model['typical_positive_ratio']) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/RBXLegacyDiscordBot/lib/chardet/sbcsgroupprober.py b/RBXLegacyDiscordBot/lib/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..98e95dc --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/sbcsgroupprober.py @@ -0,0 +1,73 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .sbcharsetprober import SingleByteCharSetProber +from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, + Latin5CyrillicModel, MacCyrillicModel, + Ibm866Model, Ibm855Model) +from .langgreekmodel import Latin7GreekModel, Win1253GreekModel +from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel +# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +from .langthaimodel import TIS620ThaiModel +from .langhebrewmodel import Win1255HebrewModel +from .hebrewprober import HebrewProber +from .langturkishmodel import Latin5TurkishModel + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + self.probers = [ + SingleByteCharSetProber(Win1251CyrillicModel), + SingleByteCharSetProber(Koi8rModel), + SingleByteCharSetProber(Latin5CyrillicModel), + SingleByteCharSetProber(MacCyrillicModel), + SingleByteCharSetProber(Ibm866Model), + SingleByteCharSetProber(Ibm855Model), + SingleByteCharSetProber(Latin7GreekModel), + SingleByteCharSetProber(Win1253GreekModel), + SingleByteCharSetProber(Latin5BulgarianModel), + SingleByteCharSetProber(Win1251BulgarianModel), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(Latin2HungarianModel), + # SingleByteCharSetProber(Win1250HungarianModel), + SingleByteCharSetProber(TIS620ThaiModel), + SingleByteCharSetProber(Latin5TurkishModel), + ] + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, + False, hebrew_prober) + visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, + hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) + self.probers.extend([hebrew_prober, logical_hebrew_prober, + visual_hebrew_prober]) + + self.reset() diff --git a/RBXLegacyDiscordBot/lib/chardet/sjisprober.py b/RBXLegacyDiscordBot/lib/chardet/sjisprober.py new file mode 100644 index 0000000..9e29623 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/RBXLegacyDiscordBot/lib/chardet/universaldetector.py b/RBXLegacyDiscordBot/lib/chardet/universaldetector.py new file mode 100644 index 0000000..7b4e92d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. 
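+
+        A minimal sketch of incremental use (here ``chunks`` stands in for
+        whatever iterable of ``bytes`` objects you are reading from):
+
+        .. code::
+
+              detector = UniversalDetector()
+              for chunk in chunks:
+                  detector.feed(chunk)
+                  if detector.done:
+                      break
+              detector.close()
+              print(detector.result)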
+ """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. 
+ elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. + """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() == logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + return self.result diff --git a/RBXLegacyDiscordBot/lib/chardet/utf8prober.py b/RBXLegacyDiscordBot/lib/chardet/utf8prober.py new file mode 100644 index 0000000..6c3196c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/RBXLegacyDiscordBot/lib/chardet/version.py b/RBXLegacyDiscordBot/lib/chardet/version.py new file mode 100644 index 0000000..bb2a34a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "3.0.4" +VERSION = __version__.split('.') diff --git a/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/PKG-INFO b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..5ad0fe8 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/PKG-INFO @@ -0,0 +1,128 @@ +Metadata-Version: 1.1 +Name: discord.py +Version: 0.16.8 +Summary: A python wrapper for the Discord API +Home-page: https://github.com/Rapptz/discord.py +Author: Rapptz +Author-email: UNKNOWN +License: MIT +Description: # discord.py + + [![PyPI](https://img.shields.io/pypi/v/discord.py.svg)](https://pypi.python.org/pypi/discord.py/) + [![PyPI](https://img.shields.io/pypi/pyversions/discord.py.svg)](https://pypi.python.org/pypi/discord.py/) + + discord.py is an API wrapper for Discord written in Python. + + This was written to allow easier writing of bots or chat logs. 
Make sure to familiarise yourself with the API using the [documentation][doc]. + + [doc]: http://discordpy.rtfd.org/en/latest + + ### Breaking Changes + + The discord API is constantly changing and the wrapper API is as well. There will be no effort to keep backwards compatibility in versions before `v1.0.0`. + + I recommend that you follow the discussion in the [unofficial Discord API discord channel][ch] and update your installation periodically. I will attempt to make note of breaking changes in the API channel so make sure to subscribe to library news by typing `?sub news` in the channel. + + [ch]: https://discord.gg/0SBTUU1wZTUzBx2q + + ## Installing + + To install the library without full voice support, you can just run the following command: + + ``` + python3 -m pip install -U discord.py + ``` + + Otherwise to get voice support you should run the following command: + + ``` + python3 -m pip install -U discord.py[voice] + ``` + + To install the development version, do the following: + + ``` + python3 -m pip install -U https://github.com/Rapptz/discord.py/archive/master.zip#egg=discord.py[voice] + ``` + + or the more long winded from cloned source: + + ``` + $ git clone https://github.com/Rapptz/discord.py + $ cd discord.py + $ python3 -m pip install -U .[voice] + ``` + + Please note that on Linux installing voice you must install the following packages via your favourite package manager (e.g. `apt`, `yum`, etc) before running the above command: + + - libffi-dev (or `libffi-devel` on some systems) + - python-dev (e.g. `python3.5-dev` for Python 3.5) + + ## Quick Example + + ```py + import discord + import asyncio + + client = discord.Client() + + @client.event + async def on_ready(): + print('Logged in as') + print(client.user.name) + print(client.user.id) + print('------') + + @client.event + async def on_message(message): + if message.content.startswith('!test'): + counter = 0 + tmp = await client.send_message(message.channel, 'Calculating messages...') + async for log in client.logs_from(message.channel, limit=100): + if log.author == message.author: + counter += 1 + + await client.edit_message(tmp, 'You have {} messages.'.format(counter)) + elif message.content.startswith('!sleep'): + await asyncio.sleep(5) + await client.send_message(message.channel, 'Done sleeping') + + client.run('token') + ``` + + Note that in Python 3.4 you use `@asyncio.coroutine` instead of `async def` and `yield from` instead of `await`. + + You can find examples in the examples directory. + + ## Requirements + + - Python 3.4.2+ + - `aiohttp` library + - `websockets` library + - `PyNaCl` library (optional, for voice only) + - On Linux systems this requires the `libffi` library. You can install in + debian based systems by doing `sudo apt-get install libffi-dev`. + + Usually `pip` will handle these for you. 
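+
+        For example, on Python 3.4 (the minimum version listed above) the `!sleep`
+        handler from the Quick Example would be written with the older coroutine
+        syntax. This is only a rough, untested sketch of that same handler:
+
+        ```py
+        @client.event
+        @asyncio.coroutine
+        def on_message(message):
+            if message.content.startswith('!sleep'):
+                yield from asyncio.sleep(5)
+                yield from client.send_message(message.channel, 'Done sleeping')
+        ```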
+ + ## Related Projects + + - [discord.js](https://github.com/discord-js/discord.js) + - [discord.io](https://github.com/izy521/discord.io) + - [Discord.NET](https://github.com/RogueException/Discord.Net) + - [DiscordSharp](https://github.com/Luigifan/DiscordSharp) + - [Discord4J](https://github.com/knobody/Discord4J) + - [discordrb](https://github.com/meew0/discordrb) + +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities diff --git a/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/SOURCES.txt b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..905b03d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,49 @@ +LICENSE +MANIFEST.in +README.md +requirements.txt +discord/__init__.py +discord/calls.py +discord/channel.py +discord/client.py +discord/colour.py +discord/compat.py +discord/embeds.py +discord/emoji.py +discord/enums.py +discord/errors.py +discord/game.py +discord/gateway.py +discord/http.py +discord/invite.py +discord/iterators.py +discord/member.py +discord/message.py +discord/mixins.py +discord/object.py +discord/opus.py +discord/permissions.py +discord/reaction.py +discord/role.py +discord/server.py +discord/state.py +discord/user.py +discord/utils.py +discord/voice_client.py +discord.py.egg-info/PKG-INFO +discord.py.egg-info/SOURCES.txt +discord.py.egg-info/dependency_links.txt +discord.py.egg-info/requires.txt +discord.py.egg-info/top_level.txt +discord/bin/libopus-0.x64.dll +discord/bin/libopus-0.x86.dll +discord/ext/__init__.py +discord/ext/commands/__init__.py +discord/ext/commands/bot.py +discord/ext/commands/context.py +discord/ext/commands/converter.py +discord/ext/commands/cooldowns.py +discord/ext/commands/core.py +discord/ext/commands/errors.py +discord/ext/commands/formatter.py +discord/ext/commands/view.py \ No newline at end of file diff --git a/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/dependency_links.txt b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/installed-files.txt b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..86ee53a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/installed-files.txt @@ -0,0 +1,83 @@ +..\discord\calls.py +..\discord\channel.py +..\discord\client.py +..\discord\colour.py +..\discord\compat.py +..\discord\embeds.py +..\discord\emoji.py +..\discord\enums.py +..\discord\errors.py +..\discord\game.py +..\discord\gateway.py +..\discord\http.py +..\discord\invite.py +..\discord\iterators.py +..\discord\member.py +..\discord\message.py +..\discord\mixins.py +..\discord\object.py +..\discord\opus.py +..\discord\permissions.py +..\discord\reaction.py +..\discord\role.py 
+..\discord\server.py +..\discord\state.py +..\discord\user.py +..\discord\utils.py +..\discord\voice_client.py +..\discord\__init__.py +..\discord\ext\__init__.py +..\discord\ext\commands\bot.py +..\discord\ext\commands\context.py +..\discord\ext\commands\converter.py +..\discord\ext\commands\cooldowns.py +..\discord\ext\commands\core.py +..\discord\ext\commands\errors.py +..\discord\ext\commands\formatter.py +..\discord\ext\commands\view.py +..\discord\ext\commands\__init__.py +..\discord\bin\libopus-0.x64.dll +..\discord\bin\libopus-0.x86.dll +..\discord\__pycache__\calls.cpython-36.pyc +..\discord\__pycache__\channel.cpython-36.pyc +..\discord\__pycache__\client.cpython-36.pyc +..\discord\__pycache__\colour.cpython-36.pyc +..\discord\__pycache__\compat.cpython-36.pyc +..\discord\__pycache__\embeds.cpython-36.pyc +..\discord\__pycache__\emoji.cpython-36.pyc +..\discord\__pycache__\enums.cpython-36.pyc +..\discord\__pycache__\errors.cpython-36.pyc +..\discord\__pycache__\game.cpython-36.pyc +..\discord\__pycache__\gateway.cpython-36.pyc +..\discord\__pycache__\http.cpython-36.pyc +..\discord\__pycache__\invite.cpython-36.pyc +..\discord\__pycache__\iterators.cpython-36.pyc +..\discord\__pycache__\member.cpython-36.pyc +..\discord\__pycache__\message.cpython-36.pyc +..\discord\__pycache__\mixins.cpython-36.pyc +..\discord\__pycache__\object.cpython-36.pyc +..\discord\__pycache__\opus.cpython-36.pyc +..\discord\__pycache__\permissions.cpython-36.pyc +..\discord\__pycache__\reaction.cpython-36.pyc +..\discord\__pycache__\role.cpython-36.pyc +..\discord\__pycache__\server.cpython-36.pyc +..\discord\__pycache__\state.cpython-36.pyc +..\discord\__pycache__\user.cpython-36.pyc +..\discord\__pycache__\utils.cpython-36.pyc +..\discord\__pycache__\voice_client.cpython-36.pyc +..\discord\__pycache__\__init__.cpython-36.pyc +..\discord\ext\__pycache__\__init__.cpython-36.pyc +..\discord\ext\commands\__pycache__\bot.cpython-36.pyc +..\discord\ext\commands\__pycache__\context.cpython-36.pyc +..\discord\ext\commands\__pycache__\converter.cpython-36.pyc +..\discord\ext\commands\__pycache__\cooldowns.cpython-36.pyc +..\discord\ext\commands\__pycache__\core.cpython-36.pyc +..\discord\ext\commands\__pycache__\errors.cpython-36.pyc +..\discord\ext\commands\__pycache__\formatter.cpython-36.pyc +..\discord\ext\commands\__pycache__\view.cpython-36.pyc +..\discord\ext\commands\__pycache__\__init__.cpython-36.pyc +dependency_links.txt +PKG-INFO +requires.txt +SOURCES.txt +top_level.txt diff --git a/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/requires.txt b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..eda5bcf --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/requires.txt @@ -0,0 +1,5 @@ +aiohttp>=1.0.0,<1.1.0 +websockets>=3.1,<4.0 + +[voice] +PyNaCl==1.0.1 diff --git a/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/top_level.txt b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..e46fba2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord.py-0.16.8-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +discord diff --git a/RBXLegacyDiscordBot/lib/discord/__init__.py b/RBXLegacyDiscordBot/lib/discord/__init__.py new file mode 100644 index 0000000..c2e0244 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/__init__.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- + +""" +Discord API Wrapper +~~~~~~~~~~~~~~~~~~~ + +A basic wrapper for the Discord API. 
+ +:copyright: (c) 2015-2016 Rapptz +:license: MIT, see LICENSE for more details. + +""" + +__title__ = 'discord' +__author__ = 'Rapptz' +__license__ = 'MIT' +__copyright__ = 'Copyright 2015-2016 Rapptz' +__version__ = '0.16.8' + +from .client import Client, AppInfo, ChannelPermissions +from .user import User +from .game import Game +from .emoji import Emoji +from .channel import Channel, PrivateChannel +from .server import Server +from .member import Member, VoiceState +from .message import Message +from .errors import * +from .calls import CallMessage, GroupCall +from .permissions import Permissions, PermissionOverwrite +from .role import Role +from .colour import Color, Colour +from .invite import Invite +from .object import Object +from .reaction import Reaction +from . import utils, opus, compat +from .voice_client import VoiceClient +from .enums import ChannelType, ServerRegion, Status, MessageType, VerificationLevel +from collections import namedtuple +from .embeds import Embed + +import logging + +VersionInfo = namedtuple('VersionInfo', 'major minor micro releaselevel serial') + +version_info = VersionInfo(major=0, minor=16, micro=8, releaselevel='final', serial=0) + +try: + from logging import NullHandler +except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass + +logging.getLogger(__name__).addHandler(NullHandler()) diff --git a/RBXLegacyDiscordBot/lib/discord/calls.py b/RBXLegacyDiscordBot/lib/discord/calls.py new file mode 100644 index 0000000..40df55a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/calls.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from . import utils +import datetime +from .enums import ServerRegion, try_enum +from .member import VoiceState + +class CallMessage: + """Represents a group call message from Discord. + + This is only received in cases where the message type is equivalent to + :attr:`MessageType.call`. + + Attributes + ----------- + ended_timestamp: Optional[datetime.datetime] + A naive UTC datetime object that represents the time that the call has ended. + participants: List[:class:`User`] + The list of users that are participating in this call. + message: :class:`Message` + The message associated with this call message. 
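+
+    A minimal usage sketch (``call_message`` stands in for a
+    :class:`CallMessage` obtained from a message of type :attr:`MessageType.call`):
+
+    .. code-block:: python
+
+        if call_message.call_ended:
+            print('Call lasted {0.duration}'.format(call_message))
+        else:
+            print('{0} participants so far'.format(len(call_message.participants)))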
+ """ + + def __init__(self, message, **kwargs): + self.message = message + self.ended_timestamp = utils.parse_time(kwargs.get('ended_timestamp')) + self.participants = kwargs.get('participants') + + @property + def call_ended(self): + """bool: Indicates if the call has ended.""" + return self.ended_timestamp is not None + + @property + def channel(self): + """:class:`PrivateChannel`\: The private channel associated with this message.""" + return self.message.channel + + @property + def duration(self): + """Queries the duration of the call. + + If the call has not ended then the current duration will + be returned. + + Returns + --------- + datetime.timedelta + The timedelta object representing the duration. + """ + if self.ended_timestamp is None: + return datetime.datetime.utcnow() - self.message.timestamp + else: + return self.ended_timestamp - self.message.timestamp + +class GroupCall: + """Represents the actual group call from Discord. + + This is accompanied with a :class:`CallMessage` denoting the information. + + Attributes + ----------- + call: :class:`CallMessage` + The call message associated with this group call. + unavailable: bool + Denotes if this group call is unavailable. + ringing: List[:class:`User`] + A list of users that are currently being rung to join the call. + region: :class:`ServerRegion` + The server region the group call is being hosted on. + """ + + def __init__(self, **kwargs): + self.call = kwargs.get('call') + self.unavailable = kwargs.get('unavailable') + self._voice_states = {} + + for state in kwargs.get('voice_states', []): + self._update_voice_state(state) + + self._update(**kwargs) + + def _update(self, **kwargs): + self.region = try_enum(ServerRegion, kwargs.get('region')) + lookup = {u.id: u for u in self.call.channel.recipients} + me = self.call.channel.me + lookup[me.id] = me + self.ringing = list(filter(None, map(lambda i: lookup.get(i), kwargs.get('ringing', [])))) + + def _update_voice_state(self, data): + user_id = data['user_id'] + # left the voice channel? + if data['channel_id'] is None: + self._voice_states.pop(user_id, None) + else: + data['voice_channel'] = self.channel + self._voice_states[user_id] = VoiceState(**data) + + @property + def connected(self): + """A property that returns the list of :class:`User` that are currently in this call.""" + ret = [u for u in self.channel.recipients if self.voice_state_for(u) is not None] + me = self.channel.me + if self.voice_state_for(me) is not None: + ret.append(me) + + return ret + + @property + def channel(self): + """:class:`PrivateChannel`\: Returns the channel the group call is in.""" + return self.call.channel + + def voice_state_for(self, user): + """Retrieves the :class:`VoiceState` for a specified :class:`User`. + + If the :class:`User` has no voice state then this function returns + ``None``. + + Parameters + ------------ + user: :class:`User` + The user to retrieve the voice state for. + + Returns + -------- + Optiona[:class:`VoiceState`] + The voice state associated with this user. 
+ """ + + return self._voice_states.get(user.id) + diff --git a/RBXLegacyDiscordBot/lib/discord/channel.py b/RBXLegacyDiscordBot/lib/discord/channel.py new file mode 100644 index 0000000..95aaaea --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/channel.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import copy +from . import utils +from .permissions import Permissions, PermissionOverwrite +from .enums import ChannelType +from collections import namedtuple +from .mixins import Hashable +from .role import Role +from .user import User +from .member import Member + +Overwrites = namedtuple('Overwrites', 'id allow deny type') + +class Channel(Hashable): + """Represents a Discord server channel. + + Supported Operations: + + +-----------+---------------------------------------+ + | Operation | Description | + +===========+=======================================+ + | x == y | Checks if two channels are equal. | + +-----------+---------------------------------------+ + | x != y | Checks if two channels are not equal. | + +-----------+---------------------------------------+ + | hash(x) | Returns the channel's hash. | + +-----------+---------------------------------------+ + | str(x) | Returns the channel's name. | + +-----------+---------------------------------------+ + + Attributes + ----------- + name : str + The channel name. + server : :class:`Server` + The server the channel belongs to. + id : str + The channel ID. + topic : Optional[str] + The channel's topic. None if it doesn't exist. + is_private : bool + ``True`` if the channel is a private channel (i.e. PM). ``False`` in this case. + position : int + The position in the channel list. This is a number that starts at 0. e.g. the + top channel is position 0. The position varies depending on being a voice channel + or a text channel, so a 0 position voice channel is on top of the voice channel + list. + type : :class:`ChannelType` + The channel type. There is a chance that the type will be ``str`` if + the channel type is not within the ones recognised by the enumerator. + bitrate : int + The channel's preferred audio bitrate in bits per second. + voice_members + A list of :class:`Members` that are currently inside this voice channel. + If :attr:`type` is not :attr:`ChannelType.voice` then this is always an empty array. + user_limit : int + The channel's limit for number of members that can be in a voice channel. 
+ """ + + __slots__ = [ 'voice_members', 'name', 'id', 'server', 'topic', 'position', + 'is_private', 'type', 'bitrate', 'user_limit', + '_permission_overwrites' ] + + def __init__(self, **kwargs): + self._update(**kwargs) + self.voice_members = [] + + def __str__(self): + return self.name + + def _update(self, **kwargs): + self.name = kwargs.get('name') + self.server = kwargs.get('server') + self.id = kwargs.get('id') + self.topic = kwargs.get('topic') + self.is_private = False + self.position = kwargs.get('position') + self.bitrate = kwargs.get('bitrate') + self.type = kwargs.get('type') + self.user_limit = kwargs.get('user_limit') + try: + self.type = ChannelType(self.type) + except: + pass + + self._permission_overwrites = [] + everyone_index = 0 + everyone_id = self.server.id + + for index, overridden in enumerate(kwargs.get('permission_overwrites', [])): + overridden_id = overridden['id'] + self._permission_overwrites.append(Overwrites(**overridden)) + + if overridden.get('type') == 'member': + continue + + if overridden_id == everyone_id: + # the @everyone role is not guaranteed to be the first one + # in the list of permission overwrites, however the permission + # resolution code kind of requires that it is the first one in + # the list since it is special. So we need the index so we can + # swap it to be the first one. + everyone_index = index + + # do the swap + tmp = self._permission_overwrites + if tmp: + tmp[everyone_index], tmp[0] = tmp[0], tmp[everyone_index] + + @property + def changed_roles(self): + """Returns a list of :class:`Roles` that have been overridden from + their default values in the :attr:`Server.roles` attribute.""" + ret = [] + for overwrite in filter(lambda o: o.type == 'role', self._permission_overwrites): + role = utils.get(self.server.roles, id=overwrite.id) + if role is None: + continue + + role = copy.copy(role) + role.permissions.handle_overwrite(overwrite.allow, overwrite.deny) + ret.append(role) + return ret + + @property + def is_default(self): + """bool : Indicates if this is the default channel for the :class:`Server` it belongs to.""" + return self.server.id == self.id + + @property + def mention(self): + """str : The string that allows you to mention the channel.""" + return '<#{0.id}>'.format(self) + + @property + def created_at(self): + """Returns the channel's creation time in UTC.""" + return utils.snowflake_time(self.id) + + def overwrites_for(self, obj): + """Returns the channel-specific overwrites for a member or a role. + + Parameters + ----------- + obj + The :class:`Role` or :class:`Member` or :class:`Object` denoting + whose overwrite to get. + + Returns + --------- + :class:`PermissionOverwrite` + The permission overwrites for this object. + """ + + if isinstance(obj, Member): + predicate = lambda p: p.type == 'member' + elif isinstance(obj, Role): + predicate = lambda p: p.type == 'role' + else: + predicate = lambda p: True + + for overwrite in filter(predicate, self._permission_overwrites): + if overwrite.id == obj.id: + allow = Permissions(overwrite.allow) + deny = Permissions(overwrite.deny) + return PermissionOverwrite.from_pair(allow, deny) + + return PermissionOverwrite() + + @property + def overwrites(self): + """Returns all of the channel's overwrites. + + This is returned as a list of two-element tuples containing the target, + which can be either a :class:`Role` or a :class:`Member` and the overwrite + as the second element as a :class:`PermissionOverwrite`. 
+ + Returns + -------- + List[Tuple[Union[:class:`Role`, :class:`Member`], :class:`PermissionOverwrite`]]: + The channel's permission overwrites. + """ + ret = [] + for ow in self._permission_overwrites: + allow = Permissions(ow.allow) + deny = Permissions(ow.deny) + overwrite = PermissionOverwrite.from_pair(allow, deny) + + if ow.type == 'role': + # accidentally quadratic + target = utils.find(lambda r: r.id == ow.id, self.server.roles) + elif ow.type == 'member': + target = self.server.get_member(ow.id) + + ret.append((target, overwrite)) + return ret + + def permissions_for(self, member): + """Handles permission resolution for the current :class:`Member`. + + This function takes into consideration the following cases: + + - Server owner + - Server roles + - Channel overrides + - Member overrides + - Whether the channel is the default channel. + + Parameters + ---------- + member : :class:`Member` + The member to resolve permissions for. + + Returns + ------- + :class:`Permissions` + The resolved permissions for the member. + """ + + # The current cases can be explained as: + # Server owner get all permissions -- no questions asked. Otherwise... + # The @everyone role gets the first application. + # After that, the applied roles that the user has in the channel + # (or otherwise) are then OR'd together. + # After the role permissions are resolved, the member permissions + # have to take into effect. + # After all that is done.. you have to do the following: + + # If manage permissions is True, then all permissions are set to + # True. If the channel is the default channel then everyone gets + # read permissions regardless. + + # The operation first takes into consideration the denied + # and then the allowed. + + if member.id == self.server.owner.id: + return Permissions.all() + + default = self.server.default_role + base = Permissions(default.permissions.value) + + # Apply server roles that the member has. + for role in member.roles: + base.value |= role.permissions.value + + # Server-wide Administrator -> True for everything + # Bypass all channel-specific overrides + if base.administrator: + return Permissions.all() + + member_role_ids = set(map(lambda r: r.id, member.roles)) + denies = 0 + allows = 0 + + # Apply channel specific role permission overwrites + for overwrite in self._permission_overwrites: + if overwrite.type == 'role' and overwrite.id in member_role_ids: + denies |= overwrite.deny + allows |= overwrite.allow + + base.handle_overwrite(allow=allows, deny=denies) + + # Apply member specific permission overwrites + for overwrite in self._permission_overwrites: + if overwrite.type == 'member' and overwrite.id == member.id: + base.handle_overwrite(allow=overwrite.allow, deny=overwrite.deny) + break + + # default channels can always be read + if self.is_default: + base.read_messages = True + + # if you can't send a message in a channel then you can't have certain + # permissions as well + if not base.send_messages: + base.send_tts_messages = False + base.mention_everyone = False + base.embed_links = False + base.attach_files = False + + # if you can't read a channel then you have no permissions there + if not base.read_messages: + denied = Permissions.all_channel() + base.value &= ~denied.value + + # text channels do not have voice related permissions + if self.type is ChannelType.text: + denied = Permissions.voice() + base.value &= ~denied.value + + return base + +class PrivateChannel(Hashable): + """Represents a Discord private channel. 
+ + Supported Operations: + + +-----------+-------------------------------------------------+ + | Operation | Description | + +===========+=================================================+ + | x == y | Checks if two channels are equal. | + +-----------+-------------------------------------------------+ + | x != y | Checks if two channels are not equal. | + +-----------+-------------------------------------------------+ + | hash(x) | Returns the channel's hash. | + +-----------+-------------------------------------------------+ + | str(x) | Returns a string representation of the channel | + +-----------+-------------------------------------------------+ + + Attributes + ---------- + recipients: list of :class:`User` + The users you are participating with in the private channel. + me: :class:`User` + The user presenting yourself. + id: str + The private channel ID. + is_private: bool + ``True`` if the channel is a private channel (i.e. PM). ``True`` in this case. + type: :class:`ChannelType` + The type of private channel. + owner: Optional[:class:`User`] + The user that owns the private channel. If the channel type is not + :attr:`ChannelType.group` then this is always ``None``. + icon: Optional[str] + The private channel's icon hash. If the channel type is not + :attr:`ChannelType.group` then this is always ``None``. + name: Optional[str] + The private channel's name. If the channel type is not + :attr:`ChannelType.group` then this is always ``None``. + """ + + __slots__ = ['id', 'recipients', 'type', 'owner', 'icon', 'name', 'me'] + + def __init__(self, me, **kwargs): + self.recipients = [User(**u) for u in kwargs['recipients']] + self.id = kwargs['id'] + self.me = me + self.type = ChannelType(kwargs['type']) + self._update_group(**kwargs) + + def _update_group(self, **kwargs): + owner_id = kwargs.get('owner_id') + self.icon = kwargs.get('icon') + self.name = kwargs.get('name') + self.owner = utils.find(lambda u: u.id == owner_id, self.recipients) + + @property + def is_private(self): + return True + + def __str__(self): + if self.type is ChannelType.private: + return 'Direct Message with {0.name}'.format(self.user) + + if self.name: + return self.name + + if len(self.recipients) == 0: + return 'Unnamed' + + return ', '.join(map(lambda x: x.name, self.recipients)) + + @property + def user(self): + """A property that returns the first recipient of the private channel. + + This is mainly for compatibility and ease of use with old style private + channels that had a single recipient. + """ + return self.recipients[0] + + @property + def icon_url(self): + """Returns the channel's icon URL if available or an empty string otherwise.""" + if self.icon is None: + return '' + + return 'https://cdn.discordapp.com/channel-icons/{0.id}/{0.icon}.jpg'.format(self) + + @property + def created_at(self): + """Returns the private channel's creation time in UTC.""" + return utils.snowflake_time(self.id) + + def permissions_for(self, user): + """Handles permission resolution for a :class:`User`. + + This function is there for compatibility with :class:`Channel`. + + Actual private messages do not really have the concept of permissions. + + This returns all the Text related permissions set to true except: + + - send_tts_messages: You cannot send TTS messages in a PM. + - manage_messages: You cannot delete others messages in a PM. + + This also handles permissions for :attr:`ChannelType.group` channels + such as kicking or mentioning everyone. 
+ + Parameters + ----------- + user : :class:`User` + The user to check permissions for. + + Returns + -------- + :class:`Permissions` + The resolved permissions for the user. + """ + + base = Permissions.text() + base.send_tts_messages = False + base.manage_messages = False + base.mention_everyone = self.type is ChannelType.group + + if user == self.owner: + base.kick_members = True + + return base + + diff --git a/RBXLegacyDiscordBot/lib/discord/client.py b/RBXLegacyDiscordBot/lib/discord/client.py new file mode 100644 index 0000000..0d6c3e3 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/client.py @@ -0,0 +1,3319 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from . import __version__ as library_version +from .user import User +from .member import Member +from .channel import Channel, PrivateChannel +from .server import Server +from .message import Message +from .invite import Invite +from .object import Object +from .reaction import Reaction +from .role import Role +from .errors import * +from .state import ConnectionState +from .permissions import Permissions, PermissionOverwrite +from . import utils, compat +from .enums import ChannelType, ServerRegion, VerificationLevel, Status +from .voice_client import VoiceClient +from .iterators import LogsFromIterator +from .gateway import * +from .emoji import Emoji +from .http import HTTPClient + +import asyncio +import aiohttp +import websockets + +import logging, traceback +import sys, re, io, enum +import tempfile, os, hashlib +import itertools +import datetime +from collections import namedtuple +from os.path import split as path_split + +PY35 = sys.version_info >= (3, 5) +log = logging.getLogger(__name__) + +AppInfo = namedtuple('AppInfo', 'id name description icon owner') +WaitedReaction = namedtuple('WaitedReaction', 'reaction user') + +def app_info_icon_url(self): + """Retrieves the application's icon_url if it exists. Empty string otherwise.""" + if not self.icon: + return '' + + return 'https://cdn.discordapp.com/app-icons/{0.id}/{0.icon}.jpg'.format(self) + +AppInfo.icon_url = property(app_info_icon_url) + +class WaitForType(enum.Enum): + message = 0 + reaction = 1 + +ChannelPermissions = namedtuple('ChannelPermissions', 'target overwrite') +ChannelPermissions.__new__.__defaults__ = (PermissionOverwrite(),) + +class Client: + """Represents a client connection that connects to Discord. + This class is used to interact with the Discord WebSocket and API. 
+ + A number of options can be passed to the :class:`Client`. + + .. _deque: https://docs.python.org/3.4/library/collections.html#collections.deque + .. _event loop: https://docs.python.org/3/library/asyncio-eventloops.html + .. _connector: http://aiohttp.readthedocs.org/en/stable/client_reference.html#connectors + .. _ProxyConnector: http://aiohttp.readthedocs.org/en/stable/client_reference.html#proxyconnector + + Parameters + ---------- + max_messages : Optional[int] + The maximum number of messages to store in :attr:`messages`. + This defaults to 5000. Passing in `None` or a value less than 100 + will use the default instead of the passed in value. + loop : Optional[event loop]. + The `event loop`_ to use for asynchronous operations. Defaults to ``None``, + in which case the default event loop is used via ``asyncio.get_event_loop()``. + cache_auth : Optional[bool] + Indicates if :meth:`login` should cache the authentication tokens. Defaults + to ``True``. The method in which the cache is written is done by writing to + disk to a temporary directory. + connector : aiohttp.BaseConnector + The `connector`_ to use for connection pooling. Useful for proxies, e.g. + with a `ProxyConnector`_. + shard_id : Optional[int] + Integer starting at 0 and less than shard_count. + shard_count : Optional[int] + The total number of shards. + + Attributes + ----------- + user : Optional[:class:`User`] + Represents the connected client. None if not logged in. + voice_clients : iterable of :class:`VoiceClient` + Represents a list of voice connections. To connect to voice use + :meth:`join_voice_channel`. To query the voice connection state use + :meth:`is_voice_connected`. + servers : iterable of :class:`Server` + The servers that the connected client is a member of. + private_channels : iterable of :class:`PrivateChannel` + The private channels that the connected client is participating on. + messages + A deque_ of :class:`Message` that the client has received from all + servers and private messages. The number of messages stored in this + deque is controlled by the ``max_messages`` parameter. + email + The email used to login. This is only set if login is successful, + otherwise it's None. + ws + The websocket gateway the client is currently connected to. Could be None. + loop + The `event loop`_ that the client uses for HTTP requests and websocket operations. 
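+
+    For illustration, a client with a smaller message cache and with token
+    caching disabled might be constructed like this (the values are
+    arbitrary): ::
+
+        client = discord.Client(max_messages=1000, cache_auth=False)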
+ + """ + def __init__(self, *, loop=None, **options): + self.ws = None + self.email = None + self.loop = asyncio.get_event_loop() if loop is None else loop + self._listeners = [] + self.cache_auth = options.get('cache_auth', True) + self.shard_id = options.get('shard_id') + self.shard_count = options.get('shard_count') + + max_messages = options.get('max_messages') + if max_messages is None or max_messages < 100: + max_messages = 5000 + + self.connection = ConnectionState(self.dispatch, self.request_offline_members, + self._syncer, max_messages, loop=self.loop) + + connector = options.pop('connector', None) + self.http = HTTPClient(connector, loop=self.loop) + + self._closed = asyncio.Event(loop=self.loop) + self._is_logged_in = asyncio.Event(loop=self.loop) + self._is_ready = asyncio.Event(loop=self.loop) + + if VoiceClient.warn_nacl: + VoiceClient.warn_nacl = False + log.warning("PyNaCl is not installed, voice will NOT be supported") + + # internals + + @asyncio.coroutine + def _syncer(self, guilds): + yield from self.ws.request_sync(guilds) + + def _get_cache_filename(self, email): + filename = hashlib.md5(email.encode('utf-8')).hexdigest() + return os.path.join(tempfile.gettempdir(), 'discord_py', filename) + + def _get_cache_token(self, email, password): + try: + log.info('attempting to login via cache') + cache_file = self._get_cache_filename(email) + self.email = email + with open(cache_file, 'r') as f: + log.info('login cache file found') + return f.read() + + # at this point our check failed + # so we have to login and get the proper token and then + # redo the cache + except OSError: + log.info('a problem occurred while opening login cache') + return None # file not found et al + + def _update_cache(self, email, password): + try: + cache_file = self._get_cache_filename(email) + os.makedirs(os.path.dirname(cache_file), exist_ok=True) + with os.fdopen(os.open(cache_file, os.O_WRONLY | os.O_CREAT, 0o0600), 'w') as f: + log.info('updating login cache') + f.write(self.http.token) + except OSError: + log.info('a problem occurred while updating the login cache') + pass + + def handle_reaction_add(self, reaction, user): + removed = [] + for i, (condition, future, event_type) in enumerate(self._listeners): + if event_type is not WaitForType.reaction: + continue + + if future.cancelled(): + removed.append(i) + continue + + try: + result = condition(reaction, user) + except Exception as e: + future.set_exception(e) + removed.append(i) + else: + if result: + future.set_result(WaitedReaction(reaction, user)) + removed.append(i) + + + for idx in reversed(removed): + del self._listeners[idx] + + def handle_message(self, message): + removed = [] + for i, (condition, future, event_type) in enumerate(self._listeners): + if event_type is not WaitForType.message: + continue + + if future.cancelled(): + removed.append(i) + continue + + try: + result = condition(message) + except Exception as e: + future.set_exception(e) + removed.append(i) + else: + if result: + future.set_result(message) + removed.append(i) + + + for idx in reversed(removed): + del self._listeners[idx] + + def handle_ready(self): + self._is_ready.set() + + def _resolve_invite(self, invite): + if isinstance(invite, Invite) or isinstance(invite, Object): + return invite.id + else: + rx = r'(?:https?\:\/\/)?discord\.gg\/(.+)' + m = re.match(rx, invite) + if m: + return m.group(1) + return invite + + @asyncio.coroutine + def _resolve_destination(self, destination): + if isinstance(destination, Channel): + return destination.id, 
destination.server.id + elif isinstance(destination, PrivateChannel): + return destination.id, None + elif isinstance(destination, Server): + return destination.id, destination.id + elif isinstance(destination, User): + found = self.connection._get_private_channel_by_user(destination.id) + if found is None: + # Couldn't find the user, so start a PM with them first. + channel = yield from self.start_private_message(destination) + return channel.id, None + else: + return found.id, None + elif isinstance(destination, Object): + found = self.get_channel(destination.id) + if found is not None: + return (yield from self._resolve_destination(found)) + + # couldn't find it in cache so YOLO + return destination.id, destination.id + else: + fmt = 'Destination must be Channel, PrivateChannel, User, or Object. Received {0.__class__.__name__}' + raise InvalidArgument(fmt.format(destination)) + + def __getattr__(self, name): + if name in ('user', 'servers', 'private_channels', 'messages', 'voice_clients'): + return getattr(self.connection, name) + else: + msg = "'{}' object has no attribute '{}'" + raise AttributeError(msg.format(self.__class__, name)) + + def __setattr__(self, name, value): + if name in ('user', 'servers', 'private_channels', 'messages', 'voice_clients'): + return setattr(self.connection, name, value) + else: + object.__setattr__(self, name, value) + + @asyncio.coroutine + def _run_event(self, event, *args, **kwargs): + try: + yield from getattr(self, event)(*args, **kwargs) + except asyncio.CancelledError: + pass + except Exception: + try: + yield from self.on_error(event, *args, **kwargs) + except asyncio.CancelledError: + pass + + def dispatch(self, event, *args, **kwargs): + log.debug('Dispatching event {}'.format(event)) + method = 'on_' + event + handler = 'handle_' + event + + if hasattr(self, handler): + getattr(self, handler)(*args, **kwargs) + + if hasattr(self, method): + compat.create_task(self._run_event(method, *args, **kwargs), loop=self.loop) + + @asyncio.coroutine + def on_error(self, event_method, *args, **kwargs): + """|coro| + + The default error handler provided by the client. + + By default this prints to ``sys.stderr`` however it could be + overridden to have a different implementation. + Check :func:`discord.on_error` for more details. + """ + print('Ignoring exception in {}'.format(event_method), file=sys.stderr) + traceback.print_exc() + + # login state management + + @asyncio.coroutine + def _login_1(self, token, **kwargs): + log.info('logging in using static token') + is_bot = kwargs.pop('bot', True) + data = yield from self.http.static_login(token, bot=is_bot) + self.email = data.get('email', None) + self.connection.is_bot = is_bot + self._is_logged_in.set() + + @asyncio.coroutine + def _login_2(self, email, password, **kwargs): + # attempt to read the token from cache + self.connection.is_bot = False + + if self.cache_auth: + token = self._get_cache_token(email, password) + try: + yield from self.http.static_login(token, bot=False) + except: + log.info('cache auth token is out of date') + else: + self._is_logged_in.set() + return + + + yield from self.http.email_login(email, password) + self.email = email + self._is_logged_in.set() + + # since we went through all this trouble + # let's make sure we don't have to do it again + if self.cache_auth: + self._update_cache(email, password) + + @asyncio.coroutine + def login(self, *args, **kwargs): + """|coro| + + Logs in the client with the specified credentials. 
+ + This function can be used in two different ways. + + .. code-block:: python + + await client.login('token') + + # or + + await client.login('email', 'password') + + More than 2 parameters or less than 1 parameter raises a + :exc:`TypeError`. + + Parameters + ----------- + bot : bool + Keyword argument that specifies if the account logging on is a bot + token or not. Only useful for logging in with a static token. + Ignored for the email and password combo. Defaults to ``True``. + + Raises + ------ + LoginFailure + The wrong credentials are passed. + HTTPException + An unknown HTTP related error occurred, + usually when it isn't 200 or the known incorrect credentials + passing status code. + TypeError + The incorrect number of parameters is passed. + """ + + n = len(args) + if n in (2, 1): + yield from getattr(self, '_login_' + str(n))(*args, **kwargs) + else: + raise TypeError('login() takes 1 or 2 positional arguments but {} were given'.format(n)) + + @asyncio.coroutine + def logout(self): + """|coro| + + Logs out of Discord and closes all connections. + """ + yield from self.close() + self._is_logged_in.clear() + + @asyncio.coroutine + def connect(self): + """|coro| + + Creates a websocket connection and lets the websocket listen + to messages from discord. + + Raises + ------- + GatewayNotFound + If the gateway to connect to discord is not found. Usually if this + is thrown then there is a discord API outage. + ConnectionClosed + The websocket connection has been terminated. + """ + self.ws = yield from DiscordWebSocket.from_client(self) + + while not self.is_closed: + try: + yield from self.ws.poll_event() + except (ReconnectWebSocket, ResumeWebSocket) as e: + resume = type(e) is ResumeWebSocket + log.info('Got ' + type(e).__name__) + self.ws = yield from DiscordWebSocket.from_client(self, resume=resume) + except ConnectionClosed as e: + yield from self.close() + if e.code != 1000: + raise + + @asyncio.coroutine + def close(self): + """|coro| + + Closes the connection to discord. + """ + if self.is_closed: + return + + for voice in list(self.voice_clients): + try: + yield from voice.disconnect() + except: + # if an error happens during disconnects, disregard it. + pass + + self.connection._remove_voice_client(voice.server.id) + + if self.ws is not None and self.ws.open: + yield from self.ws.close() + + + yield from self.http.close() + self._closed.set() + self._is_ready.clear() + + @asyncio.coroutine + def start(self, *args, **kwargs): + """|coro| + + A shorthand coroutine for :meth:`login` + :meth:`connect`. + """ + yield from self.login(*args, **kwargs) + yield from self.connect() + + def run(self, *args, **kwargs): + """A blocking call that abstracts away the `event loop`_ + initialisation from you. + + If you want more control over the event loop then this + function should not be used. Use :meth:`start` coroutine + or :meth:`connect` + :meth:`login`. + + Roughly Equivalent to: :: + + try: + loop.run_until_complete(start(*args, **kwargs)) + except KeyboardInterrupt: + loop.run_until_complete(logout()) + # cancel all tasks lingering + finally: + loop.close() + + Warning + -------- + This function must be the last function to call due to the fact that it + is blocking. That means that registration of events or anything being + called after this function call will not execute until it returns. 
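+
+        A minimal entry point along these lines (the token string below is a
+        placeholder): ::
+
+            client = discord.Client()
+
+            @client.event
+            async def on_ready():
+                print('Logged in as', client.user.name)
+
+            client.run('token')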
+ """ + + try: + self.loop.run_until_complete(self.start(*args, **kwargs)) + except KeyboardInterrupt: + self.loop.run_until_complete(self.logout()) + pending = asyncio.Task.all_tasks(loop=self.loop) + gathered = asyncio.gather(*pending, loop=self.loop) + try: + gathered.cancel() + self.loop.run_until_complete(gathered) + + # we want to retrieve any exceptions to make sure that + # they don't nag us about it being un-retrieved. + gathered.exception() + except: + pass + finally: + self.loop.close() + + # properties + + @property + def is_logged_in(self): + """bool: Indicates if the client has logged in successfully.""" + return self._is_logged_in.is_set() + + @property + def is_closed(self): + """bool: Indicates if the websocket connection is closed.""" + return self._closed.is_set() + + # helpers/getters + + def get_channel(self, id): + """Returns a :class:`Channel` or :class:`PrivateChannel` with the following ID. If not found, returns None.""" + return self.connection.get_channel(id) + + def get_server(self, id): + """Returns a :class:`Server` with the given ID. If not found, returns None.""" + return self.connection._get_server(id) + + def get_all_emojis(self): + """Returns a generator with every :class:`Emoji` the client can see.""" + for server in self.servers: + for emoji in server.emojis: + yield emoji + + def get_all_channels(self): + """A generator that retrieves every :class:`Channel` the client can 'access'. + + This is equivalent to: :: + + for server in client.servers: + for channel in server.channels: + yield channel + + Note + ----- + Just because you receive a :class:`Channel` does not mean that + you can communicate in said channel. :meth:`Channel.permissions_for` should + be used for that. + """ + + for server in self.servers: + for channel in server.channels: + yield channel + + def get_all_members(self): + """Returns a generator with every :class:`Member` the client can see. + + This is equivalent to: :: + + for server in client.servers: + for member in server.members: + yield member + + """ + for server in self.servers: + for member in server.members: + yield member + + # listeners/waiters + + @asyncio.coroutine + def wait_until_ready(self): + """|coro| + + This coroutine waits until the client is all ready. This could be considered + another way of asking for :func:`discord.on_ready` except meant for your own + background tasks. + """ + yield from self._is_ready.wait() + + @asyncio.coroutine + def wait_until_login(self): + """|coro| + + This coroutine waits until the client is logged on successfully. This + is different from waiting until the client's state is all ready. For + that check :func:`discord.on_ready` and :meth:`wait_until_ready`. + """ + yield from self._is_logged_in.wait() + + @asyncio.coroutine + def wait_for_message(self, timeout=None, *, author=None, channel=None, content=None, check=None): + """|coro| + + Waits for a message reply from Discord. This could be seen as another + :func:`discord.on_message` event outside of the actual event. This could + also be used for follow-ups and easier user interactions. + + The keyword arguments passed into this function are combined using the logical and + operator. The ``check`` keyword argument can be used to pass in more complicated + checks and must be a regular function (not a coroutine). + + The ``timeout`` parameter is passed into `asyncio.wait_for`_. By default, it + does not timeout. 
Instead of throwing ``asyncio.TimeoutError`` the coroutine + catches the exception and returns ``None`` instead of a :class:`Message`. + + If the ``check`` predicate throws an exception, then the exception is propagated. + + This function returns the **first message that meets the requirements**. + + .. _asyncio.wait_for: https://docs.python.org/3/library/asyncio-task.html#asyncio.wait_for + + Examples + ---------- + + Basic example: + + .. code-block:: python + :emphasize-lines: 5 + + @client.event + async def on_message(message): + if message.content.startswith('$greet'): + await client.send_message(message.channel, 'Say hello') + msg = await client.wait_for_message(author=message.author, content='hello') + await client.send_message(message.channel, 'Hello.') + + Asking for a follow-up question: + + .. code-block:: python + :emphasize-lines: 6 + + @client.event + async def on_message(message): + if message.content.startswith('$start'): + await client.send_message(message.channel, 'Type $stop 4 times.') + for i in range(4): + msg = await client.wait_for_message(author=message.author, content='$stop') + fmt = '{} left to go...' + await client.send_message(message.channel, fmt.format(3 - i)) + + await client.send_message(message.channel, 'Good job!') + + Advanced filters using ``check``: + + .. code-block:: python + :emphasize-lines: 9 + + @client.event + async def on_message(message): + if message.content.startswith('$cool'): + await client.send_message(message.channel, 'Who is cool? Type $name namehere') + + def check(msg): + return msg.content.startswith('$name') + + message = await client.wait_for_message(author=message.author, check=check) + name = message.content[len('$name'):].strip() + await client.send_message(message.channel, '{} is cool indeed'.format(name)) + + + Parameters + ----------- + timeout : float + The number of seconds to wait before returning ``None``. + author : :class:`Member` or :class:`User` + The author the message must be from. + channel : :class:`Channel` or :class:`PrivateChannel` or :class:`Object` + The channel the message must be from. + content : str + The exact content the message must have. + check : function + A predicate for other complicated checks. The predicate must take + a :class:`Message` as its only parameter. + + Returns + -------- + :class:`Message` + The message that you requested for. + """ + + def predicate(message): + result = True + if author is not None: + result = result and message.author == author + + if content is not None: + result = result and message.content == content + + if channel is not None: + result = result and message.channel.id == channel.id + + if callable(check): + # the exception thrown by check is propagated through the future. + result = result and check(message) + + return result + + future = asyncio.Future(loop=self.loop) + self._listeners.append((predicate, future, WaitForType.message)) + try: + message = yield from asyncio.wait_for(future, timeout, loop=self.loop) + except asyncio.TimeoutError: + message = None + return message + + + @asyncio.coroutine + def wait_for_reaction(self, emoji=None, *, user=None, timeout=None, message=None, check=None): + """|coro| + + Waits for a message reaction from Discord. This is similar to :meth:`wait_for_message` + and could be seen as another :func:`on_reaction_add` event outside of the actual event. + This could be used for follow up situations. + + Similar to :meth:`wait_for_message`, the keyword arguments are combined using logical + AND operator. 
The ``check`` keyword argument can be used to pass in more complicated + checks and must a regular function taking in two arguments, ``(reaction, user)``. It + must not be a coroutine. + + The ``timeout`` parameter is passed into asyncio.wait_for. By default, it + does not timeout. Instead of throwing ``asyncio.TimeoutError`` the coroutine + catches the exception and returns ``None`` instead of a the ``(reaction, user)`` + tuple. + + If the ``check`` predicate throws an exception, then the exception is propagated. + + The ``emoji`` parameter can be either a :class:`Emoji`, a ``str`` representing + an emoji, or a sequence of either type. If the ``emoji`` parameter is a sequence + then the first reaction emoji that is in the list is returned. If ``None`` is + passed then the first reaction emoji used is returned. + + This function returns the **first reaction that meets the requirements**. + + Examples + --------- + + Basic Example: + + .. code-block:: python + + @client.event + async def on_message(message): + if message.content.startswith('$react'): + msg = await client.send_message(message.channel, 'React with thumbs up or thumbs down.') + res = await client.wait_for_reaction(['\N{THUMBS UP SIGN}', '\N{THUMBS DOWN SIGN}'], message=msg) + await client.send_message(message.channel, '{0.user} reacted with {0.reaction.emoji}!'.format(res)) + + Checking for reaction emoji regardless of skin tone: + + .. code-block:: python + + @client.event + async def on_message(message): + if message.content.startswith('$react'): + msg = await client.send_message(message.channel, 'React with thumbs up or thumbs down.') + + def check(reaction, user): + e = str(reaction.emoji) + return e.startswith(('\N{THUMBS UP SIGN}', '\N{THUMBS DOWN SIGN}')) + + res = await client.wait_for_reaction(message=msg, check=check) + await client.send_message(message.channel, '{0.user} reacted with {0.reaction.emoji}!'.format(res)) + + Parameters + ----------- + timeout: float + The number of seconds to wait before returning ``None``. + user: :class:`Member` or :class:`User` + The user the reaction must be from. + emoji: str or :class:`Emoji` or sequence + The emoji that we are waiting to react with. + message: :class:`Message` + The message that we want the reaction to be from. + check: function + A predicate for other complicated checks. The predicate must take + ``(reaction, user)`` as its two parameters, which ``reaction`` being a + :class:`Reaction` and ``user`` being either a :class:`User` or a + :class:`Member`. + + Returns + -------- + namedtuple + A namedtuple with attributes ``reaction`` and ``user`` similar to :func:`on_reaction_add`. + """ + + if emoji is None: + emoji_check = lambda r: True + elif isinstance(emoji, (str, Emoji)): + emoji_check = lambda r: r.emoji == emoji + else: + emoji_check = lambda r: r.emoji in emoji + + def predicate(reaction, reaction_user): + result = emoji_check(reaction) + + if message is not None: + result = result and message.id == reaction.message.id + + if user is not None: + result = result and user.id == reaction_user.id + + if callable(check): + # the exception thrown by check is propagated through the future. 
+ result = result and check(reaction, reaction_user) + + return result + + future = asyncio.Future(loop=self.loop) + self._listeners.append((predicate, future, WaitForType.reaction)) + try: + return (yield from asyncio.wait_for(future, timeout, loop=self.loop)) + except asyncio.TimeoutError: + return None + + # event registration + + def event(self, coro): + """A decorator that registers an event to listen to. + + You can find more info about the events on the :ref:`documentation below `. + + The events must be a |corourl|_, if not, :exc:`ClientException` is raised. + + Examples + --------- + + Using the basic :meth:`event` decorator: :: + + @client.event + @asyncio.coroutine + def on_ready(): + print('Ready!') + + Saving characters by using the :meth:`async_event` decorator: :: + + @client.async_event + def on_ready(): + print('Ready!') + + """ + + if not asyncio.iscoroutinefunction(coro): + raise ClientException('event registered must be a coroutine function') + + setattr(self, coro.__name__, coro) + log.info('{0.__name__} has successfully been registered as an event'.format(coro)) + return coro + + def async_event(self, coro): + """A shorthand decorator for ``asyncio.coroutine`` + :meth:`event`.""" + if not asyncio.iscoroutinefunction(coro): + coro = asyncio.coroutine(coro) + + return self.event(coro) + + # Message sending/management + + @asyncio.coroutine + def start_private_message(self, user): + """|coro| + + Starts a private message with the user. This allows you to + :meth:`send_message` to the user. + + Note + ----- + This method should rarely be called as :meth:`send_message` + does it automatically for you. + + Parameters + ----------- + user : :class:`User` + The user to start the private message with. + + Raises + ------ + HTTPException + The request failed. + InvalidArgument + The user argument was not of :class:`User`. + """ + + if not isinstance(user, User): + raise InvalidArgument('user argument must be a User') + + data = yield from self.http.start_private_message(user.id) + channel = PrivateChannel(me=self.user, **data) + self.connection._add_private_channel(channel) + return channel + + @asyncio.coroutine + def add_reaction(self, message, emoji): + """|coro| + + Add a reaction to the given message. + + The message must be a :class:`Message` that exists. emoji may be a unicode emoji, + or a custom server :class:`Emoji`. + + Parameters + ------------ + message : :class:`Message` + The message to react to. + emoji : :class:`Emoji` or str + The emoji to react with. + + Raises + -------- + HTTPException + Adding the reaction failed. + Forbidden + You do not have the proper permissions to react to the message. + NotFound + The message or emoji you specified was not found. + InvalidArgument + The message or emoji parameter is invalid. + """ + if not isinstance(message, Message): + raise InvalidArgument('message argument must be a Message') + if not isinstance(emoji, (str, Emoji)): + raise InvalidArgument('emoji argument must be a string or Emoji') + + if isinstance(emoji, Emoji): + emoji = '{}:{}'.format(emoji.name, emoji.id) + + yield from self.http.add_reaction(message.id, message.channel.id, emoji) + + @asyncio.coroutine + def remove_reaction(self, message, emoji, member): + """|coro| + + Remove a reaction by the member from the given message. + + If member != server.me, you need Manage Messages to remove the reaction. + + The message must be a :class:`Message` that exists. emoji may be a unicode emoji, + or a custom server :class:`Emoji`. 
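+
+        For instance, assuming ``msg`` is a :class:`Message` sent in a server
+        channel, adding and then removing the bot's own reaction might look
+        like: ::
+
+            await client.add_reaction(msg, '\N{THUMBS UP SIGN}')
+            await client.remove_reaction(msg, '\N{THUMBS UP SIGN}', msg.server.me)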
+ + Parameters + ------------ + message : :class:`Message` + The message. + emoji : :class:`Emoji` or str + The emoji to remove. + member : :class:`Member` + The member for which to delete the reaction. + + Raises + -------- + HTTPException + Removing the reaction failed. + Forbidden + You do not have the proper permissions to remove the reaction. + NotFound + The message or emoji you specified was not found. + InvalidArgument + The message or emoji parameter is invalid. + """ + if not isinstance(message, Message): + raise InvalidArgument('message argument must be a Message') + if not isinstance(emoji, (str, Emoji)): + raise InvalidArgument('emoji must be a string or Emoji') + + if isinstance(emoji, Emoji): + emoji = '{}:{}'.format(emoji.name, emoji.id) + + if member == self.user: + member_id = '@me' + else: + member_id = member.id + + yield from self.http.remove_reaction(message.id, message.channel.id, emoji, member_id) + + @asyncio.coroutine + def get_reaction_users(self, reaction, limit=100, after=None): + """|coro| + + Get the users that added a reaction to a message. + + Parameters + ------------ + reaction : :class:`Reaction` + The reaction to retrieve users for. + limit : int + The maximum number of results to return. + after : :class:`Member` or :class:`Object` + For pagination, reactions are sorted by member. + + Raises + -------- + HTTPException + Getting the users for the reaction failed. + NotFound + The message or emoji you specified was not found. + InvalidArgument + The reaction parameter is invalid. + """ + if not isinstance(reaction, Reaction): + raise InvalidArgument('reaction must be a Reaction') + + emoji = reaction.emoji + + if isinstance(emoji, Emoji): + emoji = '{}:{}'.format(emoji.name, emoji.id) + + if after: + after = after.id + + data = yield from self.http.get_reaction_users( + reaction.message.id, reaction.message.channel.id, + emoji, limit, after=after) + + return [User(**user) for user in data] + + @asyncio.coroutine + def clear_reactions(self, message): + """|coro| + + Removes all the reactions from a given message. + + You need Manage Messages permission to use this. + + Parameters + ----------- + message: :class:`Message` + The message to remove all reactions from. + + Raises + -------- + HTTPException + Removing the reactions failed. + Forbidden + You do not have the proper permissions to remove all the reactions. + """ + yield from self.http.clear_reactions(message.id, message.channel.id) + + @asyncio.coroutine + def send_message(self, destination, content=None, *, tts=False, embed=None): + """|coro| + + Sends a message to the destination given with the content given. + + The destination could be a :class:`Channel`, :class:`PrivateChannel` or :class:`Server`. + For convenience it could also be a :class:`User`. If it's a :class:`User` or :class:`PrivateChannel` + then it sends the message via private message, otherwise it sends the message to the channel. + If the destination is a :class:`Server` then it's equivalent to calling + :attr:`Server.default_channel` and sending it there. + + If it is a :class:`Object` instance then it is assumed to be the + destination ID. The destination ID is a *channel* so passing in a user + ID will not be a valid destination. + + .. versionchanged:: 0.9.0 + ``str`` being allowed was removed and replaced with :class:`Object`. + + The content must be a type that can convert to a string through ``str(content)``. + If the content is set to ``None`` (the default), then the ``embed`` parameter must + be provided. 
+ + If the ``embed`` parameter is provided, it must be of type :class:`Embed` and + it must be a rich embed type. + + Parameters + ------------ + destination + The location to send the message. + content + The content of the message to send. If this is missing, + then the ``embed`` parameter must be present. + tts : bool + Indicates if the message should be sent using text-to-speech. + embed: :class:`Embed` + The rich embed for the content. + + Raises + -------- + HTTPException + Sending the message failed. + Forbidden + You do not have the proper permissions to send the message. + NotFound + The destination was not found and hence is invalid. + InvalidArgument + The destination parameter is invalid. + + Examples + ---------- + + Sending a regular message: + + .. code-block:: python + + await client.send_message(message.channel, 'Hello') + + Sending a TTS message: + + .. code-block:: python + + await client.send_message(message.channel, 'Goodbye.', tts=True) + + Sending an embed message: + + .. code-block:: python + + em = discord.Embed(title='My Embed Title', description='My Embed Content.', colour=0xDEADBF) + em.set_author(name='Someone', icon_url=client.user.default_avatar_url) + await client.send_message(message.channel, embed=em) + + Returns + --------- + :class:`Message` + The message that was sent. + """ + + channel_id, guild_id = yield from self._resolve_destination(destination) + + content = str(content) if content is not None else None + + if embed is not None: + embed = embed.to_dict() + + data = yield from self.http.send_message(channel_id, content, guild_id=guild_id, tts=tts, embed=embed) + channel = self.get_channel(data.get('channel_id')) + message = self.connection._create_message(channel=channel, **data) + return message + + @asyncio.coroutine + def send_typing(self, destination): + """|coro| + + Send a *typing* status to the destination. + + *Typing* status will go away after 10 seconds, or after a message is sent. + + The destination parameter follows the same rules as :meth:`send_message`. + + Parameters + ---------- + destination + The location to send the typing update. + """ + + channel_id, guild_id = yield from self._resolve_destination(destination) + yield from self.http.send_typing(channel_id) + + @asyncio.coroutine + def send_file(self, destination, fp, *, filename=None, content=None, tts=False): + """|coro| + + Sends a message to the destination given with the file given. + + The destination parameter follows the same rules as :meth:`send_message`. + + The ``fp`` parameter should be either a string denoting the location for a + file or a *file-like object*. The *file-like object* passed is **not closed** + at the end of execution. You are responsible for closing it yourself. + + .. note:: + + If the file-like object passed is opened via ``open`` then the modes + 'rb' should be used. + + The ``filename`` parameter is the filename of the file. + If this is not given then it defaults to ``fp.name`` or if ``fp`` is a string + then the ``filename`` will default to the string given. You can overwrite + this value by passing this in. + + Parameters + ------------ + destination + The location to send the message. + fp + The *file-like object* or file path to send. + filename : str + The filename of the file. Defaults to ``fp.name`` if it's available. + content + The content of the message to send along with the file. This is + forced into a string by a ``str(content)`` call. + tts : bool + If the content of the message should be sent with TTS enabled. 
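+
+        For instance, either form below is intended to behave the same way
+        (the path is a placeholder): ::
+
+            await client.send_file(channel, 'cool.png', content='A picture')
+
+            with open('cool.png', 'rb') as fp:
+                await client.send_file(channel, fp, filename='cool.png')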
+ + Raises + ------- + HTTPException + Sending the file failed. + + Returns + -------- + :class:`Message` + The message sent. + """ + + channel_id, guild_id = yield from self._resolve_destination(destination) + + try: + with open(fp, 'rb') as f: + buffer = io.BytesIO(f.read()) + if filename is None: + _, filename = path_split(fp) + except TypeError: + buffer = fp + + content = str(content) if content is not None else None + data = yield from self.http.send_file(channel_id, buffer, guild_id=guild_id, + filename=filename, content=content, tts=tts) + channel = self.get_channel(data.get('channel_id')) + message = self.connection._create_message(channel=channel, **data) + return message + + @asyncio.coroutine + def delete_message(self, message): + """|coro| + + Deletes a :class:`Message`. + + Your own messages could be deleted without any proper permissions. However to + delete other people's messages, you need the proper permissions to do so. + + Parameters + ----------- + message : :class:`Message` + The message to delete. + + Raises + ------ + Forbidden + You do not have proper permissions to delete the message. + HTTPException + Deleting the message failed. + """ + channel = message.channel + guild_id = channel.server.id if not getattr(channel, 'is_private', True) else None + yield from self.http.delete_message(channel.id, message.id, guild_id) + + @asyncio.coroutine + def delete_messages(self, messages): + """|coro| + + Deletes a list of messages. This is similar to :func:`delete_message` + except it bulk deletes multiple messages. + + The channel to check where the message is deleted from is handled via + the first element of the iterable's ``.channel.id`` attributes. If the + channel is not consistent throughout the entire sequence, then an + :exc:`HTTPException` will be raised. + + Usable only by bot accounts. + + Parameters + ----------- + messages : iterable of :class:`Message` + An iterable of messages denoting which ones to bulk delete. + + Raises + ------ + ClientException + The number of messages to delete is less than 2 or more than 100. + Forbidden + You do not have proper permissions to delete the messages or + you're not using a bot account. + HTTPException + Deleting the messages failed. + """ + + messages = list(messages) + if len(messages) > 100 or len(messages) < 2: + raise ClientException('Can only delete messages in the range of [2, 100]') + + channel = messages[0].channel + message_ids = [m.id for m in messages] + guild_id = channel.server.id if not getattr(channel, 'is_private', True) else None + yield from self.http.delete_messages(channel.id, message_ids, guild_id) + + @asyncio.coroutine + def purge_from(self, channel, *, limit=100, check=None, before=None, after=None, around=None): + """|coro| + + Purges a list of messages that meet the criteria given by the predicate + ``check``. If a ``check`` is not provided then all messages are deleted + without discrimination. + + You must have Manage Messages permission to delete messages even if they + are your own. The Read Message History permission is also needed to + retrieve message history. + + Usable only by bot accounts. + + Parameters + ----------- + channel : :class:`Channel` + The channel to purge from. + limit : int + The number of messages to search through. This is not the number + of messages that will be deleted, though it can be. + check : predicate + The function used to check if a message should be deleted. + It must take a :class:`Message` as its sole parameter. 
+ before : :class:`Message` or `datetime` + The message or date before which all deleted messages must be. + If a date is provided it must be a timezone-naive datetime representing UTC time. + after : :class:`Message` or `datetime` + The message or date after which all deleted messages must be. + If a date is provided it must be a timezone-naive datetime representing UTC time. + around : :class:`Message` or `datetime` + The message or date around which all deleted messages must be. + If a date is provided it must be a timezone-naive datetime representing UTC time. + + Raises + ------- + Forbidden + You do not have proper permissions to do the actions required or + you're not using a bot account. + HTTPException + Purging the messages failed. + + Examples + --------- + + Deleting bot's messages :: + + def is_me(m): + return m.author == client.user + + deleted = await client.purge_from(channel, limit=100, check=is_me) + await client.send_message(channel, 'Deleted {} message(s)'.format(len(deleted))) + + Returns + -------- + list + The list of messages that were deleted. + """ + + if check is None: + check = lambda m: True + + if isinstance(before, datetime.datetime): + before = Object(utils.time_snowflake(before, high=False)) + if isinstance(after, datetime.datetime): + after = Object(utils.time_snowflake(after, high=True)) + if isinstance(around, datetime.datetime): + around = Object(utils.time_snowflake(around, high=True)) + + iterator = LogsFromIterator(self, channel, limit, before=before, after=after, around=around) + ret = [] + count = 0 + + while True: + try: + msg = yield from iterator.iterate() + except asyncio.QueueEmpty: + # no more messages to poll + if count >= 2: + # more than 2 messages -> bulk delete + to_delete = ret[-count:] + yield from self.delete_messages(to_delete) + elif count == 1: + # delete a single message + yield from self.delete_message(ret[-1]) + + return ret + else: + if count == 100: + # we've reached a full 'queue' + to_delete = ret[-100:] + yield from self.delete_messages(to_delete) + count = 0 + yield from asyncio.sleep(1, loop=self.loop) + + if check(msg): + count += 1 + ret.append(msg) + + @asyncio.coroutine + def edit_message(self, message, new_content=None, *, embed=None): + """|coro| + + Edits a :class:`Message` with the new message content. + + The new_content must be able to be transformed into a string via ``str(new_content)``. + + If the ``new_content`` is not provided, then ``embed`` must be provided, which must + be of type :class:`Embed`. + + The :class:`Message` object is not directly modified afterwards until the + corresponding WebSocket event is received. + + Parameters + ----------- + message : :class:`Message` + The message to edit. + new_content + The new content to replace the message with. + embed: :class:`Embed` + The new embed to replace the original embed with. + + Raises + ------- + HTTPException + Editing the message failed. + + Returns + -------- + :class:`Message` + The new edited message. + """ + + channel = message.channel + content = str(new_content) if new_content else None + embed = embed.to_dict() if embed else None + guild_id = channel.server.id if not getattr(channel, 'is_private', True) else None + data = yield from self.http.edit_message(message.id, channel.id, content, guild_id=guild_id, embed=embed) + return self.connection._create_message(channel=channel, **data) + + @asyncio.coroutine + def get_message(self, channel, id): + """|coro| + + Retrieves a single :class:`Message` from a :class:`Channel`. 
+ + This can only be used by bot accounts. + + Parameters + ------------ + channel: :class:`Channel` or :class:`PrivateChannel` + The text channel to retrieve the message from. + id: str + The message ID to look for. + + Returns + -------- + :class:`Message` + The message asked for. + + Raises + -------- + NotFound + The specified channel or message was not found. + Forbidden + You do not have the permissions required to get a message. + HTTPException + Retrieving the message failed. + """ + + data = yield from self.http.get_message(channel.id, id) + return self.connection._create_message(channel=channel, **data) + + @asyncio.coroutine + def pin_message(self, message): + """|coro| + + Pins a message. You must have Manage Messages permissions + to do this in a non-private channel context. + + Parameters + ----------- + message: :class:`Message` + The message to pin. + + Raises + ------- + Forbidden + You do not have permissions to pin the message. + NotFound + The message or channel was not found. + HTTPException + Pinning the message failed, probably due to the channel + having more than 50 pinned messages. + """ + yield from self.http.pin_message(message.channel.id, message.id) + + @asyncio.coroutine + def unpin_message(self, message): + """|coro| + + Unpins a message. You must have Manage Messages permissions + to do this in a non-private channel context. + + Parameters + ----------- + message: :class:`Message` + The message to unpin. + + Raises + ------- + Forbidden + You do not have permissions to unpin the message. + NotFound + The message or channel was not found. + HTTPException + Unpinning the message failed. + """ + yield from self.http.unpin_message(message.channel.id, message.id) + + @asyncio.coroutine + def pins_from(self, channel): + """|coro| + + Returns a list of :class:`Message` that are currently pinned for + the specified :class:`Channel` or :class:`PrivateChannel`. + + Parameters + ----------- + channel: :class:`Channel` or :class:`PrivateChannel` + The channel to look through pins for. + + Raises + ------- + NotFound + The channel was not found. + HTTPException + Retrieving the pinned messages failed. + """ + + data = yield from self.http.pins_from(channel.id) + return [self.connection._create_message(channel=channel, **m) for m in data] + + def _logs_from(self, channel, limit=100, before=None, after=None, around=None): + """|coro| + + This coroutine returns a generator that obtains logs from a specified channel. + + Parameters + ----------- + channel : :class:`Channel` or :class:`PrivateChannel` + The channel to obtain the logs from. + limit : int + The number of messages to retrieve. + before : :class:`Message` or `datetime` + The message or date before which all returned messages must be. + If a date is provided it must be a timezone-naive datetime representing UTC time. + after : :class:`Message` or `datetime` + The message or date after which all returned messages must be. + If a date is provided it must be a timezone-naive datetime representing UTC time. + around : :class:`Message` or `datetime` + The message or date around which all returned messages must be. + If a date is provided it must be a timezone-naive datetime representing UTC time. + + Raises + ------ + Forbidden + You do not have permissions to get channel logs. + NotFound + The channel you are requesting for doesn't exist. + HTTPException + The request to get logs failed. + + Yields + ------- + :class:`Message` + The message with the message data parsed. 
+ + Examples + --------- + + Basic logging: :: + + logs = yield from client.logs_from(channel) + for message in logs: + if message.content.startswith('!hello'): + if message.author == client.user: + yield from client.edit_message(message, 'goodbye') + + Python 3.5 Usage :: + + counter = 0 + async for message in client.logs_from(channel, limit=500): + if message.author == client.user: + counter += 1 + """ + before = getattr(before, 'id', None) + after = getattr(after, 'id', None) + around = getattr(around, 'id', None) + + return self.http.logs_from(channel.id, limit, before=before, after=after, around=around) + + if PY35: + def logs_from(self, channel, limit=100, *, before=None, after=None, around=None, reverse=False): + if isinstance(before, datetime.datetime): + before = Object(utils.time_snowflake(before, high=False)) + if isinstance(after, datetime.datetime): + after = Object(utils.time_snowflake(after, high=True)) + if isinstance(around, datetime.datetime): + around = Object(utils.time_snowflake(around)) + + return LogsFromIterator(self, channel, limit, before=before, after=after, around=around, reverse=reverse) + else: + @asyncio.coroutine + def logs_from(self, channel, limit=100, *, before=None, after=None): + if isinstance(before, datetime.datetime): + before = Object(utils.time_snowflake(before, high=False)) + if isinstance(after, datetime.datetime): + after = Object(utils.time_snowflake(after, high=True)) + + def generator(data): + for message in data: + yield self.connection._create_message(channel=channel, **message) + + result = [] + while limit > 0: + retrieve = limit if limit <= 100 else 100 + data = yield from self._logs_from(channel, retrieve, before, after) + if len(data): + limit -= retrieve + result.extend(data) + before = Object(id=data[-1]['id']) + else: + break + + return generator(result) + + logs_from.__doc__ = _logs_from.__doc__ + + # Member management + + @asyncio.coroutine + def request_offline_members(self, server): + """|coro| + + Requests previously offline members from the server to be filled up + into the :attr:`Server.members` cache. This function is usually not + called. + + When the client logs on and connects to the websocket, Discord does + not provide the library with offline members if the number of members + in the server is larger than 250. You can check if a server is large + if :attr:`Server.large` is ``True``. + + Parameters + ----------- + server : :class:`Server` or iterable + The server to request offline members for. If this parameter is a + iterable then it is interpreted as an iterator of servers to + request offline members for. + """ + + if hasattr(server, 'id'): + guild_id = server.id + else: + guild_id = [s.id for s in server] + + payload = { + 'op': 8, + 'd': { + 'guild_id': guild_id, + 'query': '', + 'limit': 0 + } + } + + yield from self.ws.send_as_json(payload) + + @asyncio.coroutine + def kick(self, member): + """|coro| + + Kicks a :class:`Member` from the server they belong to. + + Warning + -------- + This function kicks the :class:`Member` based on the server it + belongs to, which is accessed via :attr:`Member.server`. So you + must have the proper permissions in that server. + + Parameters + ----------- + member : :class:`Member` + The member to kick from their server. + + Raises + ------- + Forbidden + You do not have the proper permissions to kick. + HTTPException + Kicking failed. 
+ """ + yield from self.http.kick(member.id, member.server.id) + + @asyncio.coroutine + def ban(self, member, delete_message_days=1): + """|coro| + + Bans a :class:`Member` from the server they belong to. + + Warning + -------- + This function bans the :class:`Member` based on the server it + belongs to, which is accessed via :attr:`Member.server`. So you + must have the proper permissions in that server. + + Parameters + ----------- + member : :class:`Member` + The member to ban from their server. + delete_message_days : int + The number of days worth of messages to delete from the user + in the server. The minimum is 0 and the maximum is 7. + + Raises + ------- + Forbidden + You do not have the proper permissions to ban. + HTTPException + Banning failed. + """ + yield from self.http.ban(member.id, member.server.id, delete_message_days) + + @asyncio.coroutine + def unban(self, server, user): + """|coro| + + Unbans a :class:`User` from the server they are banned from. + + Parameters + ----------- + server : :class:`Server` + The server to unban the user from. + user : :class:`User` + The user to unban. + + Raises + ------- + Forbidden + You do not have the proper permissions to unban. + HTTPException + Unbanning failed. + """ + yield from self.http.unban(user.id, server.id) + + @asyncio.coroutine + def server_voice_state(self, member, *, mute=None, deafen=None): + """|coro| + + Server mutes or deafens a specific :class:`Member`. + + Warning + -------- + This function mutes or un-deafens the :class:`Member` based on the + server it belongs to, which is accessed via :attr:`Member.server`. + So you must have the proper permissions in that server. + + Parameters + ----------- + member : :class:`Member` + The member to unban from their server. + mute: Optional[bool] + Indicates if the member should be server muted or un-muted. + deafen: Optional[bool] + Indicates if the member should be server deafened or un-deafened. + + Raises + ------- + Forbidden + You do not have the proper permissions to deafen or mute. + HTTPException + The operation failed. + """ + yield from self.http.server_voice_state(member.id, member.server.id, mute=mute, deafen=deafen) + + @asyncio.coroutine + def edit_profile(self, password=None, **fields): + """|coro| + + Edits the current profile of the client. + + If a bot account is used then the password field is optional, + otherwise it is required. + + The :attr:`Client.user` object is not modified directly afterwards until the + corresponding WebSocket event is received. + + Note + ----- + To upload an avatar, a *bytes-like object* must be passed in that + represents the image being uploaded. If this is done through a file + then the file must be opened via ``open('some_filename', 'rb')`` and + the *bytes-like object* is given through the use of ``fp.read()``. + + The only image formats supported for uploading is JPEG and PNG. + + Parameters + ----------- + password : str + The current password for the client's account. Not used + for bot accounts. + new_password : str + The new password you wish to change to. + email : str + The new email you wish to change to. + username :str + The new username you wish to change to. + avatar : bytes + A *bytes-like object* representing the image to upload. + Could be ``None`` to denote no avatar. + + Raises + ------ + HTTPException + Editing your profile failed. + InvalidArgument + Wrong image format passed for ``avatar``. + ClientException + Password is required for non-bot accounts. 
+ """ + + try: + avatar_bytes = fields['avatar'] + except KeyError: + avatar = self.user.avatar + else: + if avatar_bytes is not None: + avatar = utils._bytes_to_base64_data(avatar_bytes) + else: + avatar = None + + not_bot_account = not self.user.bot + if not_bot_account and password is None: + raise ClientException('Password is required for non-bot accounts.') + + args = { + 'password': password, + 'username': fields.get('username', self.user.name), + 'avatar': avatar + } + + if not_bot_account: + args['email'] = fields.get('email', self.email) + + if 'new_password' in fields: + args['new_password'] = fields['new_password'] + + data = yield from self.http.edit_profile(**args) + if not_bot_account: + self.email = data['email'] + if 'token' in data: + self.http._token(data['token'], bot=False) + + if self.cache_auth: + self._update_cache(self.email, password) + + @asyncio.coroutine + @utils.deprecated('change_presence') + def change_status(self, game=None, idle=False): + """|coro| + + Changes the client's status. + + The game parameter is a Game object (not a string) that represents + a game being played currently. + + The idle parameter is a boolean parameter that indicates whether the + client should go idle or not. + + .. deprecated:: v0.13.0 + Use :meth:`change_presence` instead. + + Parameters + ---------- + game : Optional[:class:`Game`] + The game being played. None if no game is being played. + idle : bool + Indicates if the client should go idle. + + Raises + ------ + InvalidArgument + If the ``game`` parameter is not :class:`Game` or None. + """ + yield from self.ws.change_presence(game=game, idle=idle) + + @asyncio.coroutine + def change_presence(self, *, game=None, status=None, afk=False): + """|coro| + + Changes the client's presence. + + The game parameter is a Game object (not a string) that represents + a game being played currently. + + Parameters + ---------- + game: Optional[:class:`Game`] + The game being played. None if no game is being played. + status: Optional[:class:`Status`] + Indicates what status to change to. If None, then + :attr:`Status.online` is used. + afk: bool + Indicates if you are going AFK. This allows the discord + client to know how to handle push notifications better + for you in case you are actually idle and not lying. + + Raises + ------ + InvalidArgument + If the ``game`` parameter is not :class:`Game` or None. + """ + + if status is None: + status = 'online' + elif status is Status.offline: + status = 'invisible' + else: + status = str(status) + + yield from self.ws.change_presence(game=game, status=status, afk=afk) + + @asyncio.coroutine + def change_nickname(self, member, nickname): + """|coro| + + Changes a member's nickname. + + You must have the proper permissions to change someone's + (or your own) nickname. + + Parameters + ---------- + member : :class:`Member` + The member to change the nickname for. + nickname : Optional[str] + The nickname to change it to. ``None`` to remove + the nickname. + + Raises + ------ + Forbidden + You do not have permissions to change the nickname. + HTTPException + Changing the nickname failed. + """ + + nickname = nickname if nickname else '' + + if member == self.user: + yield from self.http.change_my_nickname(member.server.id, nickname) + else: + yield from self.http.change_nickname(member.server.id, member.id, nickname) + + # Channel management + + @asyncio.coroutine + def edit_channel(self, channel, **options): + """|coro| + + Edits a :class:`Channel`. 
+ + You must have the proper permissions to edit the channel. + + To move the channel's position use :meth:`move_channel` instead. + + The :class:`Channel` object is not directly modified afterwards until the + corresponding WebSocket event is received. + + Parameters + ---------- + channel : :class:`Channel` + The channel to update. + name : str + The new channel name. + topic : str + The new channel's topic. + bitrate : int + The new channel's bitrate. Voice only. + user_limit : int + The new channel's user limit. Voice only. + + Raises + ------ + Forbidden + You do not have permissions to edit the channel. + HTTPException + Editing the channel failed. + """ + + keys = ('name', 'topic', 'position') + for key in keys: + if key not in options: + options[key] = getattr(channel, key) + + yield from self.http.edit_channel(channel.id, **options) + + @asyncio.coroutine + def move_channel(self, channel, position): + """|coro| + + Moves the specified :class:`Channel` to the given position in the GUI. + Note that voice channels and text channels have different position values. + + The :class:`Channel` object is not directly modified afterwards until the + corresponding WebSocket event is received. + + .. warning:: + + :class:`Object` instances do not work with this function. + + Parameters + ----------- + channel : :class:`Channel` + The channel to change positions of. + position : int + The position to insert the channel to. + + Raises + ------- + InvalidArgument + If position is less than 0 or greater than the number of channels. + Forbidden + You do not have permissions to change channel order. + HTTPException + If moving the channel failed, or you are of too low rank to move the channel. + """ + + if position < 0: + raise InvalidArgument('Channel position cannot be less than 0.') + + channels = [c for c in channel.server.channels if c.type is channel.type] + + if position >= len(channels): + raise InvalidArgument('Channel position cannot be greater than {}'.format(len(channels) - 1)) + + channels.sort(key=lambda c: c.position) + + try: + # remove ourselves from the channel list + channels.remove(channel) + except ValueError: + # not there somehow lol + return + else: + # add ourselves at our designated position + channels.insert(position, channel) + + payload = [{'id': c.id, 'position': index } for index, c in enumerate(channels)] + yield from self.http.move_channel_position(channel.server.id, payload) + + @asyncio.coroutine + def create_channel(self, server, name, *overwrites, type=None): + """|coro| + + Creates a :class:`Channel` in the specified :class:`Server`. + + Note that you need the proper permissions to create the channel. + + The ``overwrites`` argument list can be used to create a 'secret' + channel upon creation. A namedtuple of :class:`ChannelPermissions` + is exposed to create a channel-specific permission overwrite in a more + self-documenting matter. You can also use a regular tuple of ``(target, overwrite)`` + where the ``overwrite`` expected has to be of type :class:`PermissionOverwrite`. + + Examples + ---------- + + Creating a voice channel: + + .. code-block:: python + + await client.create_channel(server, 'Voice', type=discord.ChannelType.voice) + + Creating a 'secret' text channel: + + .. 
code-block:: python + + everyone_perms = discord.PermissionOverwrite(read_messages=False) + my_perms = discord.PermissionOverwrite(read_messages=True) + + everyone = discord.ChannelPermissions(target=server.default_role, overwrite=everyone_perms) + mine = discord.ChannelPermissions(target=server.me, overwrite=my_perms) + await client.create_channel(server, 'secret', everyone, mine) + + Or in a more 'compact' way: + + .. code-block:: python + + everyone = discord.PermissionOverwrite(read_messages=False) + mine = discord.PermissionOverwrite(read_messages=True) + await client.create_channel(server, 'secret', (server.default_role, everyone), (server.me, mine)) + + Parameters + ----------- + server : :class:`Server` + The server to create the channel in. + name : str + The channel's name. + type : :class:`ChannelType` + The type of channel to create. Defaults to :attr:`ChannelType.text`. + overwrites: + An argument list of channel specific overwrites to apply on the channel on + creation. Useful for creating 'secret' channels. + + Raises + ------- + Forbidden + You do not have the proper permissions to create the channel. + NotFound + The server specified was not found. + HTTPException + Creating the channel failed. + InvalidArgument + The permission overwrite array is not in proper form. + + Returns + ------- + :class:`Channel` + The channel that was just created. This channel is + different than the one that will be added in cache. + """ + + if type is None: + type = ChannelType.text + + perms = [] + for overwrite in overwrites: + target = overwrite[0] + perm = overwrite[1] + if not isinstance(perm, PermissionOverwrite): + raise InvalidArgument('Expected PermissionOverwrite received {0.__name__}'.format(type(perm))) + + allow, deny = perm.pair() + payload = { + 'allow': allow.value, + 'deny': deny.value, + 'id': target.id + } + + if isinstance(target, User): + payload['type'] = 'member' + elif isinstance(target, Role): + payload['type'] = 'role' + else: + raise InvalidArgument('Expected Role, User, or Member target, received {0.__name__}'.format(type(target))) + + perms.append(payload) + + data = yield from self.http.create_channel(server.id, name, str(type), permission_overwrites=perms) + channel = Channel(server=server, **data) + return channel + + @asyncio.coroutine + def delete_channel(self, channel): + """|coro| + + Deletes a :class:`Channel`. + + In order to delete the channel, the client must have the proper permissions + in the server the channel belongs to. + + Parameters + ------------ + channel : :class:`Channel` + The channel to delete. + + Raises + ------- + Forbidden + You do not have proper permissions to delete the channel. + NotFound + The specified channel was not found. + HTTPException + Deleting the channel failed. + """ + yield from self.http.delete_channel(channel.id) + + # Server management + + @asyncio.coroutine + def leave_server(self, server): + """|coro| + + Leaves a :class:`Server`. + + Note + -------- + You cannot leave the server that you own, you must delete it instead + via :meth:`delete_server`. + + Parameters + ---------- + server : :class:`Server` + The server to leave. + + Raises + -------- + HTTPException + If leaving the server failed. + """ + yield from self.http.leave_server(server.id) + + @asyncio.coroutine + def delete_server(self, server): + """|coro| + + Deletes a :class:`Server`. You must be the server owner to delete the + server. + + Parameters + ---------- + server : :class:`Server` + The server to delete. 
+ + Raises + -------- + HTTPException + If deleting the server failed. + Forbidden + You do not have permissions to delete the server. + """ + + yield from self.http.delete_server(server.id) + + @asyncio.coroutine + def create_server(self, name, region=None, icon=None): + """|coro| + + Creates a :class:`Server`. + + Bot accounts generally are not allowed to create servers. + See Discord's official documentation for more info. + + Parameters + ---------- + name : str + The name of the server. + region : :class:`ServerRegion` + The region for the voice communication server. + Defaults to :attr:`ServerRegion.us_west`. + icon : bytes + The *bytes-like* object representing the icon. See :meth:`edit_profile` + for more details on what is expected. + + Raises + ------ + HTTPException + Server creation failed. + InvalidArgument + Invalid icon image format given. Must be PNG or JPG. + + Returns + ------- + :class:`Server` + The server created. This is not the same server that is + added to cache. + """ + if icon is not None: + icon = utils._bytes_to_base64_data(icon) + + if region is None: + region = ServerRegion.us_west.value + else: + region = region.value + + data = yield from self.http.create_server(name, region, icon) + return Server(**data) + + @asyncio.coroutine + def edit_server(self, server, **fields): + """|coro| + + Edits a :class:`Server`. + + You must have the proper permissions to edit the server. + + The :class:`Server` object is not directly modified afterwards until the + corresponding WebSocket event is received. + + Parameters + ---------- + server: :class:`Server` + The server to edit. + name: str + The new name of the server. + icon: bytes + A *bytes-like* object representing the icon. See :meth:`edit_profile` + for more details. Could be ``None`` to denote no icon. + splash: bytes + A *bytes-like* object representing the invite splash. See + :meth:`edit_profile` for more details. Could be ``None`` to denote + no invite splash. Only available for partnered servers with + ``INVITE_SPLASH`` feature. + region: :class:`ServerRegion` + The new region for the server's voice communication. + afk_channel: Optional[:class:`Channel`] + The new channel that is the AFK channel. Could be ``None`` for no AFK channel. + afk_timeout: int + The number of seconds until someone is moved to the AFK channel. + owner: :class:`Member` + The new owner of the server to transfer ownership to. Note that you must + be owner of the server to do this. + verification_level: :class:`VerificationLevel` + The new verification level for the server. + + Raises + ------- + Forbidden + You do not have permissions to edit the server. + NotFound + The server you are trying to edit does not exist. + HTTPException + Editing the server failed. + InvalidArgument + The image format passed in to ``icon`` is invalid. It must be + PNG or JPG. This is also raised if you are not the owner of the + server and request an ownership transfer. 
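+
+        Examples
+        ---------
+
+        A couple of illustrative calls (the values are placeholders): ::
+
+            # rename the server and raise the AFK timeout to five minutes
+            yield from client.edit_server(server, name='Renamed Server', afk_timeout=300)
+
+            # remove the server icon
+            yield from client.edit_server(server, icon=None)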
+ """ + + try: + icon_bytes = fields['icon'] + except KeyError: + icon = server.icon + else: + if icon_bytes is not None: + icon = utils._bytes_to_base64_data(icon_bytes) + else: + icon = None + + try: + splash_bytes = fields['splash'] + except KeyError: + splash = server.splash + else: + if splash_bytes is not None: + splash = utils._bytes_to_base64_data(splash_bytes) + else: + splash = None + + fields['icon'] = icon + fields['splash'] = splash + + try: + afk_channel = fields.pop('afk_channel') + except KeyError: + pass + else: + if afk_channel is None: + fields['afk_channel_id'] = afk_channel + else: + fields['afk_channel_id'] = afk_channel.id + + if 'owner' in fields: + if server.owner != server.me: + raise InvalidArgument('To transfer ownership you must be the owner of the server.') + + fields['owner_id'] = fields['owner'].id + + if 'region' in fields: + fields['region'] = str(fields['region']) + + level = fields.get('verification_level', server.verification_level) + if not isinstance(level, VerificationLevel): + raise InvalidArgument('verification_level field must of type VerificationLevel') + + fields['verification_level'] = level.value + yield from self.http.edit_server(server.id, **fields) + + @asyncio.coroutine + def get_bans(self, server): + """|coro| + + Retrieves all the :class:`User` s that are banned from the specified + server. + + You must have proper permissions to get this information. + + Parameters + ---------- + server : :class:`Server` + The server to get ban information from. + + Raises + ------- + Forbidden + You do not have proper permissions to get the information. + HTTPException + An error occurred while fetching the information. + + Returns + -------- + list + A list of :class:`User` that have been banned. + """ + + data = yield from self.http.get_bans(server.id) + return [User(**user['user']) for user in data] + + @asyncio.coroutine + def prune_members(self, server, *, days): + """|coro| + + Prunes a :class:`Server` from its inactive members. + + The inactive members are denoted if they have not logged on in + ``days`` number of days and they have no roles. + + You must have the "Kick Members" permission to use this. + + To check how many members you would prune without actually pruning, + see the :meth:`estimate_pruned_members` function. + + Parameters + ----------- + server: :class:`Server` + The server to prune from. + days: int + The number of days before counting as inactive. + + Raises + ------- + Forbidden + You do not have permissions to prune members. + HTTPException + An error occurred while pruning members. + InvalidArgument + An integer was not passed for ``days``. + + Returns + --------- + int + The number of members pruned. + """ + + if not isinstance(days, int): + raise InvalidArgument('Expected int for ``days``, received {0.__class__.__name__} instead.'.format(days)) + + data = yield from self.http.prune_members(server.id, days) + return data['pruned'] + + @asyncio.coroutine + def estimate_pruned_members(self, server, *, days): + """|coro| + + Similar to :meth:`prune_members` except instead of actually + pruning members, it returns how many members it would prune + from the server had it been called. + + Parameters + ----------- + server: :class:`Server` + The server to estimate a prune from. + days: int + The number of days before counting as inactive. + + Raises + ------- + Forbidden + You do not have permissions to prune members. + HTTPException + An error occurred while fetching the prune members estimate. 
+ InvalidArgument + An integer was not passed for ``days``. + + Returns + --------- + int + The number of members estimated to be pruned. + """ + + if not isinstance(days, int): + raise InvalidArgument('Expected int for ``days``, received {0.__class__.__name__} instead.'.format(days)) + + data = yield from self.http.estimate_pruned_members(server.id, days) + return data['pruned'] + + @asyncio.coroutine + def create_custom_emoji(self, server, *, name, image): + """|coro| + + Creates a custom :class:`Emoji` for a :class:`Server`. + + This endpoint is only allowed for user bots or white listed + bots. If this is done by a user bot then this is a local + emoji that can only be used inside that server. + + There is currently a limit of 50 local emotes per server. + + Parameters + ----------- + server: :class:`Server` + The server to add the emoji to. + name: str + The emoji name. Must be at least 2 characters. + image: bytes + The *bytes-like* object representing the image data to use. + Only JPG and PNG images are supported. + + Returns + -------- + :class:`Emoji` + The created emoji. + + Raises + ------- + Forbidden + You are not allowed to create emojis. + HTTPException + An error occurred creating an emoji. + """ + + img = utils._bytes_to_base64_data(image) + data = yield from self.http.create_custom_emoji(server.id, name, img) + return Emoji(server=server, **data) + + @asyncio.coroutine + def delete_custom_emoji(self, emoji): + """|coro| + + Deletes a custom :class:`Emoji` from a :class:`Server`. + + This follows the same rules as :meth:`create_custom_emoji`. + + Parameters + ----------- + emoji: :class:`Emoji` + The emoji to delete. + + Raises + ------- + Forbidden + You are not allowed to delete emojis. + HTTPException + An error occurred deleting the emoji. + """ + + yield from self.http.delete_custom_emoji(emoji.server.id, emoji.id) + + @asyncio.coroutine + def edit_custom_emoji(self, emoji, *, name): + """|coro| + + Edits a :class:`Emoji`. + + Parameters + ----------- + emoji: :class:`Emoji` + The emoji to edit. + name: str + The new emoji name. + + Raises + ------- + Forbidden + You are not allowed to edit emojis. + HTTPException + An error occurred editing the emoji. + """ + + yield from self.http.edit_custom_emoji(emoji.server.id, emoji.id, name=name) + + + # Invite management + + def _fill_invite_data(self, data): + server = self.connection._get_server(data['guild']['id']) + if server is not None: + ch_id = data['channel']['id'] + channel = server.get_channel(ch_id) + else: + server = Object(id=data['guild']['id']) + server.name = data['guild']['name'] + channel = Object(id=data['channel']['id']) + channel.name = data['channel']['name'] + data['server'] = server + data['channel'] = channel + + @asyncio.coroutine + def create_invite(self, destination, **options): + """|coro| + + Creates an invite for the destination which could be either a + :class:`Server` or :class:`Channel`. + + Parameters + ------------ + destination + The :class:`Server` or :class:`Channel` to create the invite to. + max_age : int + How long the invite should last. If it's 0 then the invite + doesn't expire. Defaults to 0. + max_uses : int + How many uses the invite could be used for. If it's 0 then there + are unlimited uses. Defaults to 0. + temporary : bool + Denotes that the invite grants temporary membership + (i.e. they get kicked after they disconnect). Defaults to False. + unique: bool + Indicates if a unique invite URL should be created. Defaults to True. 
+ If this is set to False then it will return a previously created + invite. + + Raises + ------- + HTTPException + Invite creation failed. + + Returns + -------- + :class:`Invite` + The invite that was created. + """ + + data = yield from self.http.create_invite(destination.id, **options) + self._fill_invite_data(data) + return Invite(**data) + + @asyncio.coroutine + def get_invite(self, url): + """|coro| + + Gets a :class:`Invite` from a discord.gg URL or ID. + + Note + ------ + If the invite is for a server you have not joined, the server and channel + attributes of the returned invite will be :class:`Object` with the names + patched in. + + Parameters + ----------- + url : str + The discord invite ID or URL (must be a discord.gg URL). + + Raises + ------- + NotFound + The invite has expired or is invalid. + HTTPException + Getting the invite failed. + + Returns + -------- + :class:`Invite` + The invite from the URL/ID. + """ + + invite_id = self._resolve_invite(url) + data = yield from self.http.get_invite(invite_id) + self._fill_invite_data(data) + return Invite(**data) + + @asyncio.coroutine + def invites_from(self, server): + """|coro| + + Returns a list of all active instant invites from a :class:`Server`. + + You must have proper permissions to get this information. + + Parameters + ---------- + server : :class:`Server` + The server to get invites from. + + Raises + ------- + Forbidden + You do not have proper permissions to get the information. + HTTPException + An error occurred while fetching the information. + + Returns + ------- + list of :class:`Invite` + The list of invites that are currently active. + """ + + data = yield from self.http.invites_from(server.id) + result = [] + for invite in data: + channel = server.get_channel(invite['channel']['id']) + invite['channel'] = channel + invite['server'] = server + result.append(Invite(**invite)) + + return result + + @asyncio.coroutine + def accept_invite(self, invite): + """|coro| + + Accepts an :class:`Invite`, URL or ID to an invite. + + The URL must be a discord.gg URL. e.g. "http://discord.gg/codehere". + An ID for the invite is just the "codehere" portion of the invite URL. + + Parameters + ----------- + invite + The :class:`Invite` or URL to an invite to accept. + + Raises + ------- + HTTPException + Accepting the invite failed. + NotFound + The invite is invalid or expired. + Forbidden + You are a bot user and cannot use this endpoint. + """ + + invite_id = self._resolve_invite(invite) + yield from self.http.accept_invite(invite_id) + + @asyncio.coroutine + def delete_invite(self, invite): + """|coro| + + Revokes an :class:`Invite`, URL, or ID to an invite. + + The ``invite`` parameter follows the same rules as + :meth:`accept_invite`. + + Parameters + ---------- + invite + The invite to revoke. + + Raises + ------- + Forbidden + You do not have permissions to revoke invites. + NotFound + The invite is invalid or expired. + HTTPException + Revoking the invite failed. + """ + + invite_id = self._resolve_invite(invite) + yield from self.http.delete_invite(invite_id) + + # Role management + + @asyncio.coroutine + def move_role(self, server, role, position): + """|coro| + + Moves the specified :class:`Role` to the given position in the :class:`Server`. + + The :class:`Role` object is not directly modified afterwards until the + corresponding WebSocket event is received. + + Parameters + ----------- + server : :class:`Server` + The server the role belongs to. + role : :class:`Role` + The role to edit. 
+ position : int + The position to insert the role to. + + Raises + ------- + InvalidArgument + If position is 0, or role is server.default_role + Forbidden + You do not have permissions to change role order. + HTTPException + If moving the role failed, or you are of too low rank to move the role. + """ + + if position == 0: + raise InvalidArgument("Cannot move role to position 0") + + if role == server.default_role: + raise InvalidArgument("Cannot move default role") + + if role.position == position: + return # Save discord the extra request. + + change_range = range(min(role.position, position), max(role.position, position) + 1) + + roles = [r.id for r in sorted(filter(lambda x: (x.position in change_range) and x != role, server.roles), key=lambda x: x.position)] + + if role.position > position: + roles.insert(0, role.id) + else: + roles.append(role.id) + + payload = [{"id": z[0], "position": z[1]} for z in zip(roles, change_range)] + yield from self.http.move_role_position(server.id, payload) + + @asyncio.coroutine + def edit_role(self, server, role, **fields): + """|coro| + + Edits the specified :class:`Role` for the entire :class:`Server`. + + The :class:`Role` object is not directly modified afterwards until the + corresponding WebSocket event is received. + + All fields except ``server`` and ``role`` are optional. To change + the position of a role, use :func:`move_role` instead. + + .. versionchanged:: 0.8.0 + Editing now uses keyword arguments instead of editing the :class:`Role` object directly. + + Parameters + ----------- + server : :class:`Server` + The server the role belongs to. + role : :class:`Role` + The role to edit. + name : str + The new role name to change to. + permissions : :class:`Permissions` + The new permissions to change to. + colour : :class:`Colour` + The new colour to change to. (aliased to color as well) + hoist : bool + Indicates if the role should be shown separately in the online list. + mentionable : bool + Indicates if the role should be mentionable by others. + + Raises + ------- + Forbidden + You do not have permissions to change the role. + HTTPException + Editing the role failed. + """ + + colour = fields.get('colour') + if colour is None: + colour = fields.get('color', role.colour) + + payload = { + 'name': fields.get('name', role.name), + 'permissions': fields.get('permissions', role.permissions).value, + 'color': colour.value, + 'hoist': fields.get('hoist', role.hoist), + 'mentionable': fields.get('mentionable', role.mentionable) + } + + yield from self.http.edit_role(server.id, role.id, **payload) + + @asyncio.coroutine + def delete_role(self, server, role): + """|coro| + + Deletes the specified :class:`Role` for the entire :class:`Server`. + + Parameters + ----------- + server : :class:`Server` + The server the role belongs to. + role : :class:`Role` + The role to delete. + + Raises + -------- + Forbidden + You do not have permissions to delete the role. + HTTPException + Deleting the role failed. + """ + + yield from self.http.delete_role(server.id, role.id) + + @asyncio.coroutine + def _replace_roles(self, member, roles): + yield from self.http.replace_roles(member.id, member.server.id, roles) + + @asyncio.coroutine + def add_roles(self, member, *roles): + """|coro| + + Gives the specified :class:`Member` a number of :class:`Role` s. + + You must have the proper permissions to use this function. + + The :class:`Member` object is not directly modified afterwards until the + corresponding WebSocket event is received. 
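+
+        For example, an illustrative call that grants two roles at once,
+        where ``role_a`` and ``role_b`` stand in for existing
+        :class:`Role` objects: ::
+
+            yield from client.add_roles(member, role_a, role_b)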
+ + Parameters + ----------- + member : :class:`Member` + The member to give roles to. + \*roles + An argument list of :class:`Role` s to give the member. + + Raises + ------- + Forbidden + You do not have permissions to add roles. + HTTPException + Adding roles failed. + """ + + new_roles = utils._unique(role.id for role in itertools.chain(member.roles, roles)) + yield from self._replace_roles(member, new_roles) + + @asyncio.coroutine + def remove_roles(self, member, *roles): + """|coro| + + Removes the :class:`Role` s from the :class:`Member`. + + You must have the proper permissions to use this function. + + The :class:`Member` object is not directly modified afterwards until the + corresponding WebSocket event is received. + + Parameters + ----------- + member : :class:`Member` + The member to revoke roles from. + \*roles + An argument list of :class:`Role` s to revoke the member. + + Raises + ------- + Forbidden + You do not have permissions to revoke roles. + HTTPException + Removing roles failed. + """ + new_roles = [x.id for x in member.roles] + for role in roles: + try: + new_roles.remove(role.id) + except ValueError: + pass + + yield from self._replace_roles(member, new_roles) + + @asyncio.coroutine + def replace_roles(self, member, *roles): + """|coro| + + Replaces the :class:`Member`'s roles. + + You must have the proper permissions to use this function. + + This function **replaces** all roles that the member has. + For example if the member has roles ``[a, b, c]`` and the + call is ``client.replace_roles(member, d, e, c)`` then + the member has the roles ``[d, e, c]``. + + The :class:`Member` object is not directly modified afterwards until the + corresponding WebSocket event is received. + + Parameters + ----------- + member : :class:`Member` + The member to replace roles from. + \*roles + An argument list of :class:`Role` s to replace the roles with. + + Raises + ------- + Forbidden + You do not have permissions to revoke roles. + HTTPException + Removing roles failed. + """ + + new_roles = utils._unique(role.id for role in roles) + yield from self._replace_roles(member, new_roles) + + @asyncio.coroutine + def create_role(self, server, **fields): + """|coro| + + Creates a :class:`Role`. + + This function is similar to :class:`edit_role` in both + the fields taken and exceptions thrown. + + Returns + -------- + :class:`Role` + The newly created role. This not the same role that + is stored in cache. + """ + + data = yield from self.http.create_role(server.id) + role = Role(server=server, **data) + + # we have to call edit because you can't pass a payload to the + # http request currently. + yield from self.edit_role(server, role, **fields) + return role + + @asyncio.coroutine + def edit_channel_permissions(self, channel, target, overwrite=None): + """|coro| + + Sets the channel specific permission overwrites for a target in the + specified :class:`Channel`. + + The ``target`` parameter should either be a :class:`Member` or a + :class:`Role` that belongs to the channel's server. + + You must have the proper permissions to do this. + + Examples + ---------- + + Setting allow and deny: :: + + overwrite = discord.PermissionOverwrite() + overwrite.read_messages = True + overwrite.ban_members = False + await client.edit_channel_permissions(message.channel, message.author, overwrite) + + Parameters + ----------- + channel : :class:`Channel` + The channel to give the specific permissions for. + target + The :class:`Member` or :class:`Role` to overwrite permissions for. 
+ overwrite: :class:`PermissionOverwrite` + The permissions to allow and deny to the target. + + Raises + ------- + Forbidden + You do not have permissions to edit channel specific permissions. + NotFound + The channel specified was not found. + HTTPException + Editing channel specific permissions failed. + InvalidArgument + The overwrite parameter was not of type :class:`PermissionOverwrite` + or the target type was not :class:`Role` or :class:`Member`. + """ + + overwrite = PermissionOverwrite() if overwrite is None else overwrite + + + if not isinstance(overwrite, PermissionOverwrite): + raise InvalidArgument('allow and deny parameters must be PermissionOverwrite') + + allow, deny = overwrite.pair() + + if isinstance(target, Member): + perm_type = 'member' + elif isinstance(target, Role): + perm_type = 'role' + else: + raise InvalidArgument('target parameter must be either Member or Role') + + yield from self.http.edit_channel_permissions(channel.id, target.id, allow.value, deny.value, perm_type) + + @asyncio.coroutine + def delete_channel_permissions(self, channel, target): + """|coro| + + Removes a channel specific permission overwrites for a target + in the specified :class:`Channel`. + + The target parameter follows the same rules as :meth:`edit_channel_permissions`. + + You must have the proper permissions to do this. + + Parameters + ---------- + channel : :class:`Channel` + The channel to give the specific permissions for. + target + The :class:`Member` or :class:`Role` to overwrite permissions for. + + Raises + ------ + Forbidden + You do not have permissions to delete channel specific permissions. + NotFound + The channel specified was not found. + HTTPException + Deleting channel specific permissions failed. + """ + yield from self.http.delete_channel_permissions(channel.id, target.id) + + # Voice management + + @asyncio.coroutine + def move_member(self, member, channel): + """|coro| + + Moves a :class:`Member` to a different voice channel. + + You must have proper permissions to do this. + + Note + ----- + You cannot pass in a :class:`Object` instead of a :class:`Channel` + object in this function. + + Parameters + ----------- + member : :class:`Member` + The member to move to another voice channel. + channel : :class:`Channel` + The voice channel to move the member to. + + Raises + ------- + InvalidArgument + The channel provided is not a voice channel. + HTTPException + Moving the member failed. + Forbidden + You do not have permissions to move the member. + """ + + if getattr(channel, 'type', ChannelType.text) != ChannelType.voice: + raise InvalidArgument('The channel provided must be a voice channel.') + + yield from self.http.move_member(member.id, member.server.id, channel.id) + + @asyncio.coroutine + def join_voice_channel(self, channel): + """|coro| + + Joins a voice channel and creates a :class:`VoiceClient` to + establish your connection to the voice server. + + After this function is successfully called, :attr:`voice` is + set to the returned :class:`VoiceClient`. + + Parameters + ---------- + channel : :class:`Channel` + The voice channel to join to. + + Raises + ------- + InvalidArgument + The channel was not a voice channel. + asyncio.TimeoutError + Could not connect to the voice channel in time. + ClientException + You are already connected to a voice channel. + OpusNotLoaded + The opus library has not been loaded. + + Returns + ------- + :class:`VoiceClient` + A voice client that is fully connected to the voice server. 
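+
+        Examples
+        ---------
+
+        A minimal sketch, assuming ``voice_channel`` is a voice
+        :class:`Channel` you have already looked up: ::
+
+            voice = yield from client.join_voice_channel(voice_channel)
+            # ... use the returned VoiceClient ...
+            yield from voice.disconnect()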
+ """ + if isinstance(channel, Object): + channel = self.get_channel(channel.id) + + if getattr(channel, 'type', ChannelType.text) != ChannelType.voice: + raise InvalidArgument('Channel passed must be a voice channel') + + server = channel.server + + if self.is_voice_connected(server): + raise ClientException('Already connected to a voice channel in this server') + + log.info('attempting to join voice channel {0.name}'.format(channel)) + + def session_id_found(data): + user_id = data.get('user_id') + guild_id = data.get('guild_id') + return user_id == self.user.id and guild_id == server.id + + # register the futures for waiting + session_id_future = self.ws.wait_for('VOICE_STATE_UPDATE', session_id_found) + voice_data_future = self.ws.wait_for('VOICE_SERVER_UPDATE', lambda d: d.get('guild_id') == server.id) + + # request joining + yield from self.ws.voice_state(server.id, channel.id) + + try: + session_id_data = yield from asyncio.wait_for(session_id_future, timeout=10.0, loop=self.loop) + data = yield from asyncio.wait_for(voice_data_future, timeout=10.0, loop=self.loop) + except asyncio.TimeoutError as e: + yield from self.ws.voice_state(server.id, None, self_mute=True) + raise e + + kwargs = { + 'user': self.user, + 'channel': channel, + 'data': data, + 'loop': self.loop, + 'session_id': session_id_data.get('session_id'), + 'main_ws': self.ws + } + + voice = VoiceClient(**kwargs) + try: + yield from voice.connect() + except asyncio.TimeoutError as e: + try: + yield from voice.disconnect() + except: + # we don't care if disconnect failed because connection failed + pass + raise e # re-raise + + self.connection._add_voice_client(server.id, voice) + return voice + + def is_voice_connected(self, server): + """Indicates if we are currently connected to a voice channel in the + specified server. + + Parameters + ----------- + server : :class:`Server` + The server to query if we're connected to it. + """ + voice = self.voice_client_in(server) + return voice is not None + + def voice_client_in(self, server): + """Returns the voice client associated with a server. + + If no voice client is found then ``None`` is returned. + + Parameters + ----------- + server : :class:`Server` + The server to query if we have a voice client for. + + Returns + -------- + :class:`VoiceClient` + The voice client associated with the server. + """ + return self.connection._get_voice_client(server.id) + + def group_call_in(self, channel): + """Returns the :class:`GroupCall` associated with a private channel. + + If no group call is found then ``None`` is returned. + + Parameters + ----------- + channel: :class:`PrivateChannel` + The group private channel to query the group call for. + + Returns + -------- + Optional[:class:`GroupCall`] + The group call. + """ + return self.connection._calls.get(channel.id) + + # Miscellaneous stuff + + @asyncio.coroutine + def application_info(self): + """|coro| + + Retrieve's the bot's application information. + + Returns + -------- + :class:`AppInfo` + A namedtuple representing the application info. + + Raises + ------- + HTTPException + Retrieving the information failed somehow. + """ + data = yield from self.http.application_info() + return AppInfo(id=data['id'], name=data['name'], + description=data['description'], icon=data['icon'], + owner=User(**data['owner'])) + + @asyncio.coroutine + def get_user_info(self, user_id): + """|coro| + + Retrieves a :class:`User` based on their ID. This can only + be used by bot accounts. 
You do not have to share any servers + with the user to get this information, however many operations + do require that you do. + + Parameters + ----------- + user_id: str + The user's ID to fetch from. + + Returns + -------- + :class:`User` + The user you requested. + + Raises + ------- + NotFound + A user with this ID does not exist. + HTTPException + Fetching the user failed. + """ + data = yield from self.http.get_user_info(user_id) + return User(**data) diff --git a/RBXLegacyDiscordBot/lib/discord/colour.py b/RBXLegacyDiscordBot/lib/discord/colour.py new file mode 100644 index 0000000..76b8bf0 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/colour.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +class Colour: + """Represents a Discord role colour. This class is similar + to an (red, green, blue) tuple. + + There is an alias for this called Color. + + Supported operations: + + +-----------+----------------------------------------+ + | Operation | Description | + +===========+========================================+ + | x == y | Checks if two colours are equal. | + +-----------+----------------------------------------+ + | x != y | Checks if two colours are not equal. | + +-----------+----------------------------------------+ + | hash(x) | Return the colour's hash. | + +-----------+----------------------------------------+ + | str(x) | Returns the hex format for the colour. | + +-----------+----------------------------------------+ + + Attributes + ------------ + value : int + The raw integer colour value. 
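+
+    Examples
+    ---------
+
+    A few illustrative constructions: ::
+
+        blue = Colour.blue()        # factory method, value 0x3498db
+        same = Colour(0x3498db)     # direct construction from an int
+        assert blue == same
+        assert blue.to_tuple() == (0x34, 0x98, 0xdb)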
+ """ + + __slots__ = [ 'value' ] + + def __init__(self, value): + self.value = value + + def _get_byte(self, byte): + return (self.value >> (8 * byte)) & 0xff + + def __eq__(self, other): + return isinstance(other, Colour) and self.value == other.value + + def __ne__(self, other): + return not self.__eq__(other) + + def __str__(self): + return '#{:0>6x}'.format(self.value) + + def __hash__(self): + return hash(self.value) + + @property + def r(self): + """Returns the red component of the colour.""" + return self._get_byte(2) + + @property + def g(self): + """Returns the green component of the colour.""" + return self._get_byte(1) + + @property + def b(self): + """Returns the blue component of the colour.""" + return self._get_byte(0) + + def to_tuple(self): + """Returns an (r, g, b) tuple representing the colour.""" + return (self.r, self.g, self.b) + + @classmethod + def default(cls): + """A factory method that returns a :class:`Colour` with a value of 0.""" + return cls(0) + + @classmethod + def teal(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x1abc9c``.""" + return cls(0x1abc9c) + + @classmethod + def dark_teal(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x11806a``.""" + return cls(0x11806a) + + @classmethod + def green(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x2ecc71``.""" + return cls(0x2ecc71) + + @classmethod + def dark_green(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x1f8b4c``.""" + return cls(0x1f8b4c) + + @classmethod + def blue(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x3498db``.""" + return cls(0x3498db) + + @classmethod + def dark_blue(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x206694``.""" + return cls(0x206694) + + @classmethod + def purple(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x9b59b6``.""" + return cls(0x9b59b6) + + @classmethod + def dark_purple(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x71368a``.""" + return cls(0x71368a) + + @classmethod + def magenta(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xe91e63``.""" + return cls(0xe91e63) + + @classmethod + def dark_magenta(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xad1457``.""" + return cls(0xad1457) + + @classmethod + def gold(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xf1c40f``.""" + return cls(0xf1c40f) + + @classmethod + def dark_gold(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xc27c0e``.""" + return cls(0xc27c0e) + + @classmethod + def orange(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xe67e22``.""" + return cls(0xe67e22) + + @classmethod + def dark_orange(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xa84300``.""" + return cls(0xa84300) + + @classmethod + def red(cls): + """A factory method that returns a :class:`Colour` with a value of ``0xe74c3c``.""" + return cls(0xe74c3c) + + @classmethod + def dark_red(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x992d22``.""" + return cls(0x992d22) + + @classmethod + def lighter_grey(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x95a5a6``.""" + return cls(0x95a5a6) + + @classmethod + def dark_grey(cls): + """A factory method that 
returns a :class:`Colour` with a value of ``0x607d8b``.""" + return cls(0x607d8b) + + @classmethod + def light_grey(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x979c9f``.""" + return cls(0x979c9f) + + @classmethod + def darker_grey(cls): + """A factory method that returns a :class:`Colour` with a value of ``0x546e7a``.""" + return cls(0x546e7a) + + +Color = Colour diff --git a/RBXLegacyDiscordBot/lib/discord/compat.py b/RBXLegacyDiscordBot/lib/discord/compat.py new file mode 100644 index 0000000..2cb2ec9 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/compat.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import concurrent.futures +import asyncio + +try: + create_task = asyncio.ensure_future +except AttributeError: + create_task = asyncio.async + +try: + run_coroutine_threadsafe = asyncio.run_coroutine_threadsafe +except AttributeError: + # the following code is slightly modified from the + # official asyncio repository that could be found here: + # https://github.com/python/asyncio/blob/master/asyncio/futures.py + # with a commit hash of 5c7efbcdfbe6a5c25b4cd5df22d9a15ab4062c8e + # this portion is licensed under Apache license 2.0 + + def _set_concurrent_future_state(concurrent, source): + """Copy state from a future to a concurrent.futures.Future.""" + assert source.done() + if source.cancelled(): + concurrent.cancel() + if not concurrent.set_running_or_notify_cancel(): + return + exception = source.exception() + if exception is not None: + concurrent.set_exception(exception) + else: + result = source.result() + concurrent.set_result(result) + + + def _copy_future_state(source, dest): + """Internal helper to copy state from another Future. + The other Future may be a concurrent.futures.Future. + """ + assert source.done() + if dest.cancelled(): + return + assert not dest.done() + if source.cancelled(): + dest.cancel() + else: + exception = source.exception() + if exception is not None: + dest.set_exception(exception) + else: + result = source.result() + dest.set_result(result) + + def _chain_future(source, destination): + """Chain two futures so that when one completes, so does the other. + The result (or exception) of source will be copied to destination. + If destination is cancelled, source gets cancelled too. + Compatible with both asyncio.Future and concurrent.futures.Future. 
+ """ + if not isinstance(source, (asyncio.Future, concurrent.futures.Future)): + raise TypeError('A future is required for source argument') + + if not isinstance(destination, (asyncio.Future, concurrent.futures.Future)): + raise TypeError('A future is required for destination argument') + + source_loop = source._loop if isinstance(source, asyncio.Future) else None + dest_loop = destination._loop if isinstance(destination, asyncio.Future) else None + + def _set_state(future, other): + if isinstance(future, asyncio.Future): + _copy_future_state(other, future) + else: + _set_concurrent_future_state(future, other) + + def _call_check_cancel(destination): + if destination.cancelled(): + if source_loop is None or source_loop is dest_loop: + source.cancel() + else: + source_loop.call_soon_threadsafe(source.cancel) + + def _call_set_state(source): + if dest_loop is None or dest_loop is source_loop: + _set_state(destination, source) + else: + dest_loop.call_soon_threadsafe(_set_state, destination, source) + + destination.add_done_callback(_call_check_cancel) + source.add_done_callback(_call_set_state) + + def run_coroutine_threadsafe(coro, loop): + """Submit a coroutine object to a given event loop. + + Return a concurrent.futures.Future to access the result. + """ + if not asyncio.iscoroutine(coro): + raise TypeError('A coroutine object is required') + + future = concurrent.futures.Future() + + def callback(): + try: + _chain_future(create_task(coro, loop=loop), future) + except Exception as exc: + if future.set_running_or_notify_cancel(): + future.set_exception(exc) + raise + loop.call_soon_threadsafe(callback) + return future diff --git a/RBXLegacyDiscordBot/lib/discord/embeds.py b/RBXLegacyDiscordBot/lib/discord/embeds.py new file mode 100644 index 0000000..e47b814 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/embeds.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import datetime + +from .colour import Colour +from . import utils + +class _EmptyEmbed: + def __bool__(self): + return False + + def __repr__(self): + return 'Embed.Empty' + +EmptyEmbed = _EmptyEmbed() + +class EmbedProxy: + def __init__(self, layer): + self.__dict__.update(layer) + + def __repr__(self): + return 'EmbedProxy(%s)' % ', '.join(('%s=%r' % (k, v) for k, v in self.__dict__.items() if not k.startswith('_'))) + + def __getattr__(self, attr): + return EmptyEmbed + +class Embed: + """Represents a Discord embed. 
+ + The following attributes can be set during creation + of the object: + + Certain properties return an ``EmbedProxy``. Which is a type + that acts similar to a regular `dict` except access the attributes + via dotted access, e.g. ``embed.author.icon_url``. If the attribute + is invalid or empty, then a special sentinel value is returned, + :attr:`Embed.Empty`. + + For ease of use, all parameters that expect a ``str`` are implicitly + casted to ``str`` for you. + + Attributes + ----------- + title: str + The title of the embed. + type: str + The type of embed. Usually "rich". + description: str + The description of the embed. + url: str + The URL of the embed. + timestamp: `datetime.datetime` + The timestamp of the embed content. + colour: :class:`Colour` or int + The colour code of the embed. Aliased to ``color`` as well. + Empty + A special sentinel value used by ``EmbedProxy`` and this class + to denote that the value or attribute is empty. + """ + + __slots__ = ('title', 'url', 'type', '_timestamp', '_colour', '_footer', + '_image', '_thumbnail', '_video', '_provider', '_author', + '_fields', 'description') + + Empty = EmptyEmbed + + def __init__(self, **kwargs): + # swap the colour/color aliases + try: + colour = kwargs['colour'] + except KeyError: + colour = kwargs.get('color', EmptyEmbed) + + self.colour = colour + self.title = kwargs.get('title', EmptyEmbed) + self.type = kwargs.get('type', 'rich') + self.url = kwargs.get('url', EmptyEmbed) + self.description = kwargs.get('description', EmptyEmbed) + + try: + timestamp = kwargs['timestamp'] + except KeyError: + pass + else: + self.timestamp = timestamp + + @classmethod + def from_data(cls, data): + # we are bypassing __init__ here since it doesn't apply here + self = cls.__new__(cls) + + # fill in the basic fields + + self.title = data.get('title', EmptyEmbed) + self.type = data.get('type', EmptyEmbed) + self.description = data.get('description', EmptyEmbed) + self.url = data.get('url', EmptyEmbed) + + # try to fill in the more rich fields + + try: + self._colour = Colour(value=data['color']) + except KeyError: + pass + + try: + self._timestamp = utils.parse_time(data['timestamp']) + except KeyError: + pass + + for attr in ('thumbnail', 'video', 'provider', 'author', 'fields'): + try: + value = data[attr] + except KeyError: + continue + else: + setattr(self, '_' + attr, value) + + return self + + @property + def colour(self): + return getattr(self, '_colour', EmptyEmbed) + + @colour.setter + def colour(self, value): + if isinstance(value, (Colour, _EmptyEmbed)): + self._colour = value + elif isinstance(value, int): + self._colour = Colour(value=value) + else: + raise TypeError('Expected discord.Colour, int, or Embed.Empty but received %s instead.' % value.__class__.__name__) + + color = colour + + @property + def timestamp(self): + return getattr(self, '_timestamp', EmptyEmbed) + + @timestamp.setter + def timestamp(self, value): + if isinstance(value, (datetime.datetime, _EmptyEmbed)): + self._timestamp = value + else: + raise TypeError("Expected datetime.datetime or Embed.Empty received %s instead" % value.__class__.__name__) + + @property + def footer(self): + """Returns a ``EmbedProxy`` denoting the footer contents. + + See :meth:`set_footer` for possible values you can access. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_footer', {})) + + def set_footer(self, *, text=EmptyEmbed, icon_url=EmptyEmbed): + """Sets the footer for the embed content. 
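+
+        An illustrative call (the text and URL are placeholders): ::
+
+            embed.set_footer(text='Requested by Quacky',
+                             icon_url='https://example.com/icon.png')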
+ + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + text: str + The footer text. + icon_url: str + The URL of the footer icon. Only HTTP(S) is supported. + """ + + self._footer = {} + if text is not EmptyEmbed: + self._footer['text'] = str(text) + + if icon_url is not EmptyEmbed: + self._footer['icon_url'] = str(icon_url) + + return self + + @property + def image(self): + """Returns a ``EmbedProxy`` denoting the image contents. + + Possible attributes you can access are: + + - ``url`` + - ``proxy_url`` + - ``width`` + - ``height`` + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_image', {})) + + def set_image(self, *, url): + """Sets the image for the embed content. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + url: str + The source URL for the image. Only HTTP(S) is supported. + """ + + self._image = { + 'url': str(url) + } + + return self + + @property + def thumbnail(self): + """Returns a ``EmbedProxy`` denoting the thumbnail contents. + + Possible attributes you can access are: + + - ``url`` + - ``proxy_url`` + - ``width`` + - ``height`` + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_thumbnail', {})) + + def set_thumbnail(self, *, url): + """Sets the thumbnail for the embed content. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + url: str + The source URL for the thumbnail. Only HTTP(S) is supported. + """ + + self._thumbnail = { + 'url': str(url) + } + + return self + + @property + def video(self): + """Returns a ``EmbedProxy`` denoting the video contents. + + Possible attributes include: + + - ``url`` for the video URL. + - ``height`` for the video height. + - ``width`` for the video width. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_video', {})) + + @property + def provider(self): + """Returns a ``EmbedProxy`` denoting the provider contents. + + The only attributes that might be accessed are ``name`` and ``url``. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_provider', {})) + + @property + def author(self): + """Returns a ``EmbedProxy`` denoting the author contents. + + See :meth:`set_author` for possible values you can access. + + If the attribute has no value then :attr:`Empty` is returned. + """ + return EmbedProxy(getattr(self, '_author', {})) + + def set_author(self, *, name, url=EmptyEmbed, icon_url=EmptyEmbed): + """Sets the author for the embed content. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + name: str + The name of the author. + url: str + The URL for the author. + icon_url: str + The URL of the author icon. Only HTTP(S) is supported. + """ + + self._author = { + 'name': str(name) + } + + if url is not EmptyEmbed: + self._author['url'] = str(url) + + if icon_url is not EmptyEmbed: + self._author['icon_url'] = str(icon_url) + + return self + + @property + def fields(self): + """Returns a list of ``EmbedProxy`` denoting the field contents. + + See :meth:`add_field` for possible values you can access. + + If the attribute has no value then :attr:`Empty` is returned. 
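+
+        As a loose illustration, iterating an already populated embed
+        (``embed`` here is an assumed instance, not something this property
+        provides):
+
+        .. code-block:: python
+
+            for field in embed.fields:
+                print(field.name, field.value, field.inline)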
+ """ + return [EmbedProxy(d) for d in getattr(self, '_fields', [])] + + def add_field(self, *, name, value, inline=True): + """Adds a field to the embed object. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + name: str + The name of the field. + value: str + The value of the field. + inline: bool + Whether the field should be displayed inline. + """ + + field = { + 'inline': inline, + 'name': str(name), + 'value': str(value) + } + + try: + self._fields.append(field) + except AttributeError: + self._fields = [field] + + return self + + def clear_fields(self): + """Removes all fields from this embed.""" + try: + self._fields.clear() + except AttributeError: + self._fields = [] + + def remove_field(self, index): + """Removes a field at a specified index. + + If the index is invalid or out of bounds then the error is + silently swallowed. + + .. note:: + + When deleting a field by index, the index of the other fields + shift to fill the gap just like a regular list. + + Parameters + ----------- + index: int + The index of the field to remove. + """ + try: + del self._fields[index] + except (AttributeError, IndexError): + pass + + def set_field_at(self, index, *, name, value, inline=True): + """Modifies a field to the embed object. + + The index must point to a valid pre-existing field. + + This function returns the class instance to allow for fluent-style + chaining. + + Parameters + ----------- + index: int + The index of the field to modify. + name: str + The name of the field. + value: str + The value of the field. + inline: bool + Whether the field should be displayed inline. + + Raises + ------- + IndexError + An invalid index was provided. + """ + + try: + field = self._fields[index] + except (TypeError, IndexError, AttributeError): + raise IndexError('field index out of range') + + field['name'] = str(name) + field['value'] = str(value) + field['inline'] = inline + return self + + def to_dict(self): + """Converts this embed object into a dict.""" + + # add in the raw data into the dict + result = { + key[1:]: getattr(self, key) + for key in self.__slots__ + if key[0] == '_' and hasattr(self, key) + } + + # deal with basic convenience wrappers + + try: + colour = result.pop('colour') + except KeyError: + pass + else: + if colour: + result['color'] = colour.value + + try: + timestamp = result.pop('timestamp') + except KeyError: + pass + else: + if timestamp: + result['timestamp'] = timestamp.isoformat() + + # add in the non raw attribute ones + if self.type: + result['type'] = self.type + + if self.description: + result['description'] = self.description + + if self.url: + result['url'] = self.url + + if self.title: + result['title'] = self.title + + return result diff --git a/RBXLegacyDiscordBot/lib/discord/emoji.py b/RBXLegacyDiscordBot/lib/discord/emoji.py new file mode 100644 index 0000000..82384aa --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/emoji.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright 
notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from . import utils +from .mixins import Hashable + +class Emoji(Hashable): + """Represents a custom emoji. + + Depending on the way this object was created, some of the attributes can + have a value of ``None``. + + Supported Operations: + + +-----------+-----------------------------------------+ + | Operation | Description | + +===========+=========================================+ + | x == y | Checks if two emoji are the same. | + +-----------+-----------------------------------------+ + | x != y | Checks if two emoji are not the same. | + +-----------+-----------------------------------------+ + | hash(x) | Return the emoji's hash. | + +-----------+-----------------------------------------+ + | iter(x) | Returns an iterator of (field, value) | + | | pairs. This allows this class to be | + | | used as an iterable in list/dict/etc. | + | | constructions. | + +-----------+-----------------------------------------+ + | str(x) | Returns the emoji rendered for discord. | + +-----------+-----------------------------------------+ + + Attributes + ----------- + name : str + The name of the emoji. + id : str + The emoji's ID. + require_colons : bool + If colons are required to use this emoji in the client (:PJSalt: vs PJSalt). + managed : bool + If this emoji is managed by a Twitch integration. + server : :class:`Server` + The server the emoji belongs to. + roles : List[:class:`Role`] + A list of :class:`Role` that is allowed to use this emoji. If roles is empty, + the emoji is unrestricted. 
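+
+    A brief, illustrative sketch (``server`` is assumed to be a
+    :class:`Server` the client is a member of):
+
+    .. code-block:: python
+
+        for emoji in server.emojis:
+            print(emoji.name, emoji.url)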
+ """ + __slots__ = ["require_colons", "managed", "id", "name", "roles", 'server'] + + def __init__(self, **kwargs): + self.server = kwargs.pop('server') + self._from_data(kwargs) + + def _from_data(self, emoji): + self.require_colons = emoji.get('require_colons') + self.managed = emoji.get('managed') + self.id = emoji.get('id') + self.name = emoji.get('name') + self.roles = emoji.get('roles', []) + if self.roles: + roles = set(self.roles) + self.roles = [role for role in self.server.roles if role.id in roles] + + def _iterator(self): + for attr in self.__slots__: + value = getattr(self, attr, None) + if value is not None: + yield (attr, value) + + def __iter__(self): + return self._iterator() + + def __str__(self): + return "<:{0.name}:{0.id}>".format(self) + + @property + def created_at(self): + """Returns the emoji's creation time in UTC.""" + return utils.snowflake_time(self.id) + + @property + def url(self): + """Returns a URL version of the emoji.""" + return "https://discordapp.com/api/emojis/{0.id}.png".format(self) diff --git a/RBXLegacyDiscordBot/lib/discord/enums.py b/RBXLegacyDiscordBot/lib/discord/enums.py new file mode 100644 index 0000000..e8dc62d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/enums.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +from enum import Enum + +class ChannelType(Enum): + text = 0 + private = 1 + voice = 2 + group = 3 + + def __str__(self): + return self.name + +class MessageType(Enum): + default = 0 + recipient_add = 1 + recipient_remove = 2 + call = 3 + channel_name_change = 4 + channel_icon_change = 5 + pins_add = 6 + +class ServerRegion(Enum): + us_west = 'us-west' + us_east = 'us-east' + us_south = 'us-south' + us_central = 'us-central' + eu_west = 'eu-west' + eu_central = 'eu-central' + singapore = 'singapore' + london = 'london' + sydney = 'sydney' + amsterdam = 'amsterdam' + frankfurt = 'frankfurt' + brazil = 'brazil' + vip_us_east = 'vip-us-east' + vip_us_west = 'vip-us-west' + vip_amsterdam = 'vip-amsterdam' + + def __str__(self): + return self.value + +class VerificationLevel(Enum): + none = 0 + low = 1 + medium = 2 + high = 3 + table_flip = 3 + + def __str__(self): + return self.name + +class Status(Enum): + online = 'online' + offline = 'offline' + idle = 'idle' + dnd = 'dnd' + do_not_disturb = 'dnd' + invisible = 'invisible' + + def __str__(self): + return self.value + +class DefaultAvatar(Enum): + blurple = 0 + grey = 1 + gray = 1 + green = 2 + orange = 3 + red = 4 + + def __str__(self): + return self.name + +def try_enum(cls, val): + """A function that tries to turn the value into enum ``cls``. + + If it fails it returns the value instead. + """ + try: + return cls(val) + except ValueError: + return val diff --git a/RBXLegacyDiscordBot/lib/discord/errors.py b/RBXLegacyDiscordBot/lib/discord/errors.py new file mode 100644 index 0000000..46d5e94 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/errors.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +class DiscordException(Exception): + """Base exception class for discord.py + + Ideally speaking, this could be caught to handle any exceptions thrown from this library. + """ + pass + +class ClientException(DiscordException): + """Exception that's thrown when an operation in the :class:`Client` fails. + + These are usually for exceptions that happened due to user input. + """ + pass + +class GatewayNotFound(DiscordException): + """An exception that is usually thrown when the gateway hub + for the :class:`Client` websocket is not found.""" + def __init__(self): + message = 'The gateway to connect to discord was not found.' 
+ super(GatewayNotFound, self).__init__(message) + +class HTTPException(DiscordException): + """Exception that's thrown when an HTTP request operation fails. + + .. attribute:: response + + The response of the failed HTTP request. This is an + instance of `aiohttp.ClientResponse`__. + + __ http://aiohttp.readthedocs.org/en/stable/client_reference.html#aiohttp.ClientResponse + + .. attribute:: text + + The text of the error. Could be an empty string. + """ + + def __init__(self, response, message): + self.response = response + if type(message) is dict: + self.text = message.get('message', '') + self.code = message.get('code', 0) + else: + self.text = message + + fmt = '{0.reason} (status code: {0.status})' + if len(self.text): + fmt = fmt + ': {1}' + + super().__init__(fmt.format(self.response, self.text)) + +class Forbidden(HTTPException): + """Exception that's thrown for when status code 403 occurs. + + Subclass of :exc:`HTTPException` + """ + pass + +class NotFound(HTTPException): + """Exception that's thrown for when status code 404 occurs. + + Subclass of :exc:`HTTPException` + """ + pass + + +class InvalidArgument(ClientException): + """Exception that's thrown when an argument to a function + is invalid some way (e.g. wrong value or wrong type). + + This could be considered the analogous of ``ValueError`` and + ``TypeError`` except derived from :exc:`ClientException` and thus + :exc:`DiscordException`. + """ + pass + +class LoginFailure(ClientException): + """Exception that's thrown when the :meth:`Client.login` function + fails to log you in from improper credentials or some other misc. + failure. + """ + pass + +class ConnectionClosed(ClientException): + """Exception that's thrown when the gateway connection is + closed for reasons that could not be handled internally. + + Attributes + ----------- + code : int + The close code of the websocket. + reason : str + The reason provided for the closure. + """ + def __init__(self, original): + # This exception is just the same exception except + # reconfigured to subclass ClientException for users + self.code = original.code + self.reason = original.reason + super().__init__(str(original)) diff --git a/RBXLegacyDiscordBot/lib/discord/ext/__init__.py b/RBXLegacyDiscordBot/lib/discord/ext/__init__.py new file mode 100644 index 0000000..af6a008 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- + +""" +discord.py extensions +~~~~~~~~~~~~~~~~~~~~~~ + +Extensions for the discord.py library live in this namespace. + +:copyright: (c) 2016 Rapptz +:license: MIT, see LICENSE for more details. + +""" diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/__init__.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/__init__.py new file mode 100644 index 0000000..d3b64a2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/__init__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- + +""" +discord.ext.commands +~~~~~~~~~~~~~~~~~~~~~ + +An extension module to facilitate creation of bot commands. + +:copyright: (c) 2016 Rapptz +:license: MIT, see LICENSE for more details. 
+""" + +from .bot import Bot, when_mentioned, when_mentioned_or +from .context import Context +from .core import * +from .errors import * +from .formatter import HelpFormatter, Paginator +from .converter import * +from .cooldowns import BucketType diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/bot.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/bot.py new file mode 100644 index 0000000..03bbd1c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/bot.py @@ -0,0 +1,857 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import discord +import inspect +import importlib +import sys +import traceback +import re + +from .core import GroupMixin, Command, command +from .view import StringView +from .context import Context +from .errors import CommandNotFound, CommandError +from .formatter import HelpFormatter + +def _get_variable(name): + stack = inspect.stack() + try: + for frames in stack: + try: + frame = frames[0] + current_locals = frame.f_locals + if name in current_locals: + return current_locals[name] + finally: + del frame + finally: + del stack + +def when_mentioned(bot, msg): + """A callable that implements a command prefix equivalent + to being mentioned, e.g. ``@bot ``.""" + server = msg.server + if server is not None: + return '{0.me.mention} '.format(server) + return '{0.user.mention} '.format(bot) + +def when_mentioned_or(*prefixes): + """A callable that implements when mentioned or other prefixes provided. + + Example + -------- + + .. code-block:: python + + bot = commands.Bot(command_prefix=commands.when_mentioned_or('!')) + + See Also + ---------- + :func:`when_mentioned` + """ + def inner(bot, msg): + r = list(prefixes) + r.append(when_mentioned(bot, msg)) + return r + + return inner + +_mentions_transforms = { + '@everyone': '@\u200beveryone', + '@here': '@\u200bhere' +} + +_mention_pattern = re.compile('|'.join(_mentions_transforms.keys())) + +@asyncio.coroutine +def _default_help_command(ctx, *commands : str): + """Shows this message.""" + bot = ctx.bot + destination = ctx.message.author if bot.pm_help else ctx.message.channel + + def repl(obj): + return _mentions_transforms.get(obj.group(0), '') + + # help by itself just lists our own commands. 
+ if len(commands) == 0: + pages = bot.formatter.format_help_for(ctx, bot) + elif len(commands) == 1: + # try to see if it is a cog name + name = _mention_pattern.sub(repl, commands[0]) + command = None + if name in bot.cogs: + command = bot.cogs[name] + else: + command = bot.commands.get(name) + if command is None: + yield from bot.send_message(destination, bot.command_not_found.format(name)) + return + + pages = bot.formatter.format_help_for(ctx, command) + else: + name = _mention_pattern.sub(repl, commands[0]) + command = bot.commands.get(name) + if command is None: + yield from bot.send_message(destination, bot.command_not_found.format(name)) + return + + for key in commands[1:]: + try: + key = _mention_pattern.sub(repl, key) + command = command.commands.get(key) + if command is None: + yield from bot.send_message(destination, bot.command_not_found.format(key)) + return + except AttributeError: + yield from bot.send_message(destination, bot.command_has_no_subcommands.format(command, key)) + return + + pages = bot.formatter.format_help_for(ctx, command) + + if bot.pm_help is None: + characters = sum(map(lambda l: len(l), pages)) + # modify destination based on length of pages. + if characters > 1000: + destination = ctx.message.author + + for page in pages: + yield from bot.send_message(destination, page) + + +class Bot(GroupMixin, discord.Client): + """Represents a discord bot. + + This class is a subclass of :class:`discord.Client` and as a result + anything that you can do with a :class:`discord.Client` you can do with + this bot. + + This class also subclasses :class:`GroupMixin` to provide the functionality + to manage commands. + + Attributes + ----------- + command_prefix + The command prefix is what the message content must contain initially + to have a command invoked. This prefix could either be a string to + indicate what the prefix should be, or a callable that takes in the bot + as its first parameter and :class:`discord.Message` as its second + parameter and returns the prefix. This is to facilitate "dynamic" + command prefixes. This callable can be either a regular function or + a coroutine. + + The command prefix could also be a list or a tuple indicating that + multiple checks for the prefix should be used and the first one to + match will be the invocation prefix. You can get this prefix via + :attr:`Context.prefix`. + description : str + The content prefixed into the default help message. + self_bot : bool + If ``True``, the bot will only listen to commands invoked by itself rather + than ignoring itself. If ``False`` (the default) then the bot will ignore + itself. This cannot be changed once initialised. + formatter : :class:`HelpFormatter` + The formatter used to format the help message. By default, it uses a + the :class:`HelpFormatter`. Check it for more info on how to override it. + If you want to change the help command completely (add aliases, etc) then + a call to :meth:`remove_command` with 'help' as the argument would do the + trick. + pm_help : Optional[bool] + A tribool that indicates if the help command should PM the user instead of + sending it to the channel it received it from. If the boolean is set to + ``True``, then all help output is PM'd. If ``False``, none of the help + output is PM'd. If ``None``, then the bot will only PM when the help + message becomes too long (dictated by more than 1000 characters). + Defaults to ``False``. + help_attrs : dict + A dictionary of options to pass in for the construction of the help command. 
+ This allows you to change the command behaviour without actually changing + the implementation of the command. The attributes will be the same as the + ones passed in the :class:`Command` constructor. Note that ``pass_context`` + will always be set to ``True`` regardless of what you pass in. + command_not_found : str + The format string used when the help command is invoked with a command that + is not found. Useful for i18n. Defaults to ``"No command called {} found."``. + The only format argument is the name of the command passed. + command_has_no_subcommands : str + The format string used when the help command is invoked with requests for a + subcommand but the command does not have any subcommands. Defaults to + ``"Command {0.name} has no subcommands."``. The first format argument is the + :class:`Command` attempted to get a subcommand and the second is the name. + """ + def __init__(self, command_prefix, formatter=None, description=None, pm_help=False, **options): + super().__init__(**options) + self.command_prefix = command_prefix + self.extra_events = {} + self.cogs = {} + self.extensions = {} + self._checks = [] + self.description = inspect.cleandoc(description) if description else '' + self.pm_help = pm_help + self.command_not_found = options.pop('command_not_found', 'No command called "{}" found.') + self.command_has_no_subcommands = options.pop('command_has_no_subcommands', 'Command {0.name} has no subcommands.') + + self._skip_check = discord.User.__ne__ if options.pop('self_bot', False) else discord.User.__eq__ + + self.help_attrs = options.pop('help_attrs', {}) + self.help_attrs['pass_context'] = True + + if 'name' not in self.help_attrs: + self.help_attrs['name'] = 'help' + + if formatter is not None: + if not isinstance(formatter, HelpFormatter): + raise discord.ClientException('Formatter must be a subclass of HelpFormatter') + self.formatter = formatter + else: + self.formatter = HelpFormatter() + + # pay no mind to this ugliness. + self.command(**self.help_attrs)(_default_help_command) + + # internal helpers + + @asyncio.coroutine + def _get_prefix(self, message): + prefix = self.command_prefix + if callable(prefix): + ret = prefix(self, message) + if asyncio.iscoroutine(ret): + ret = yield from ret + return ret + else: + return prefix + + @asyncio.coroutine + def _run_extra(self, coro, event_name, *args, **kwargs): + try: + yield from coro(*args, **kwargs) + except asyncio.CancelledError: + pass + except Exception: + try: + yield from self.on_error(event_name, *args, **kwargs) + except asyncio.CancelledError: + pass + + def dispatch(self, event_name, *args, **kwargs): + super().dispatch(event_name, *args, **kwargs) + ev = 'on_' + event_name + if ev in self.extra_events: + for event in self.extra_events[ev]: + coro = self._run_extra(event, event_name, *args, **kwargs) + discord.compat.create_task(coro, loop=self.loop) + + @asyncio.coroutine + def close(self): + for extension in tuple(self.extensions): + try: + self.unload_extension(extension) + except: + pass + + for cog in tuple(self.cogs): + try: + self.remove_cog(cog) + except: + pass + + yield from super().close() + + @asyncio.coroutine + def on_command_error(self, exception, context): + """|coro| + + The default command error handler provided by the bot. + + By default this prints to ``sys.stderr`` however it could be + overridden to have a different implementation. + + This only fires if you do not specify any listeners for command error. 
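+
+        A rough sketch of overriding it from user code (``bot`` is an
+        assumed :class:`Bot` instance and the reply is purely illustrative):
+
+        .. code-block:: python
+
+            @bot.event
+            async def on_command_error(exception, context):
+                await bot.send_message(context.message.channel, str(exception))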
+ """ + if self.extra_events.get('on_command_error', None): + return + + if hasattr(context.command, "on_error"): + return + + print('Ignoring exception in command {}'.format(context.command), file=sys.stderr) + traceback.print_exception(type(exception), exception, exception.__traceback__, file=sys.stderr) + + # utility "send_*" functions + + @asyncio.coroutine + def _augmented_msg(self, coro, **kwargs): + msg = yield from coro + delete_after = kwargs.get('delete_after') + if delete_after is not None: + @asyncio.coroutine + def delete(): + yield from asyncio.sleep(delete_after, loop=self.loop) + yield from self.delete_message(msg) + + discord.compat.create_task(delete(), loop=self.loop) + + return msg + + def say(self, *args, **kwargs): + """|coro| + + A helper function that is equivalent to doing + + .. code-block:: python + + self.send_message(message.channel, *args, **kwargs) + + The following keyword arguments are "extensions" that augment the + behaviour of the standard wrapped call. + + Parameters + ------------ + delete_after: float + Number of seconds to wait before automatically deleting the + message. + + See Also + --------- + :meth:`Client.send_message` + """ + destination = _get_variable('_internal_channel') + + extensions = ('delete_after',) + params = { + k: kwargs.pop(k, None) for k in extensions + } + + coro = self.send_message(destination, *args, **kwargs) + return self._augmented_msg(coro, **params) + + def whisper(self, *args, **kwargs): + """|coro| + + A helper function that is equivalent to doing + + .. code-block:: python + + self.send_message(message.author, *args, **kwargs) + + The following keyword arguments are "extensions" that augment the + behaviour of the standard wrapped call. + + Parameters + ------------ + delete_after: float + Number of seconds to wait before automatically deleting the + message. + + See Also + --------- + :meth:`Client.send_message` + """ + destination = _get_variable('_internal_author') + + extensions = ('delete_after',) + params = { + k: kwargs.pop(k, None) for k in extensions + } + + coro = self.send_message(destination, *args, **kwargs) + return self._augmented_msg(coro, **params) + + def reply(self, content, *args, **kwargs): + """|coro| + + A helper function that is equivalent to doing + + .. code-block:: python + + msg = '{0.mention}, {1}'.format(message.author, content) + self.send_message(message.channel, msg, *args, **kwargs) + + The following keyword arguments are "extensions" that augment the + behaviour of the standard wrapped call. + + Parameters + ------------ + delete_after: float + Number of seconds to wait before automatically deleting the + message. + + See Also + --------- + :meth:`Client.send_message` + """ + author = _get_variable('_internal_author') + destination = _get_variable('_internal_channel') + fmt = '{0.mention}, {1}'.format(author, str(content)) + + extensions = ('delete_after',) + params = { + k: kwargs.pop(k, None) for k in extensions + } + + coro = self.send_message(destination, fmt, *args, **kwargs) + return self._augmented_msg(coro, **params) + + def upload(self, *args, **kwargs): + """|coro| + + A helper function that is equivalent to doing + + .. code-block:: python + + self.send_file(message.channel, *args, **kwargs) + + The following keyword arguments are "extensions" that augment the + behaviour of the standard wrapped call. + + Parameters + ------------ + delete_after: float + Number of seconds to wait before automatically deleting the + message. 
+ + See Also + --------- + :meth:`Client.send_file` + """ + destination = _get_variable('_internal_channel') + + extensions = ('delete_after',) + params = { + k: kwargs.pop(k, None) for k in extensions + } + + coro = self.send_file(destination, *args, **kwargs) + return self._augmented_msg(coro, **params) + + def type(self): + """|coro| + + A helper function that is equivalent to doing + + .. code-block:: python + + self.send_typing(message.channel) + + See Also + --------- + The :meth:`Client.send_typing` function. + """ + destination = _get_variable('_internal_channel') + return self.send_typing(destination) + + # global check registration + + def check(self, func): + """A decorator that adds a global check to the bot. + + A global check is similar to a :func:`check` that is applied + on a per command basis except it is run before any command checks + have been verified and applies to every command the bot has. + + .. warning:: + + This function must be a *regular* function and not a coroutine. + + Similar to a command :func:`check`\, this takes a single parameter + of type :class:`Context` and can only raise exceptions derived from + :exc:`CommandError`. + + Example + --------- + + .. code-block:: python + + @bot.check + def whitelist(ctx): + return ctx.message.author.id in my_whitelist + + """ + self.add_check(func) + return func + + def add_check(self, func): + """Adds a global check to the bot. + + This is the non-decorator interface to :meth:`check`. + + Parameters + ----------- + func + The function that was used as a global check. + """ + self._checks.append(func) + + def remove_check(self, func): + """Removes a global check from the bot. + + This function is idempotent and will not raise an exception + if the function is not in the global checks. + + Parameters + ----------- + func + The function to remove from the global checks. + """ + + try: + self._checks.remove(func) + except ValueError: + pass + + def can_run(self, ctx): + return all(f(ctx) for f in self._checks) + + # listener registration + + def add_listener(self, func, name=None): + """The non decorator alternative to :meth:`listen`. + + Parameters + ----------- + func : coroutine + The extra event to listen to. + name : Optional[str] + The name of the command to use. Defaults to ``func.__name__``. + + Example + -------- + + .. code-block:: python + + async def on_ready(): pass + async def my_message(message): pass + + bot.add_listener(on_ready) + bot.add_listener(my_message, 'on_message') + + """ + name = func.__name__ if name is None else name + + if not asyncio.iscoroutinefunction(func): + raise discord.ClientException('Listeners must be coroutines') + + if name in self.extra_events: + self.extra_events[name].append(func) + else: + self.extra_events[name] = [func] + + def remove_listener(self, func, name=None): + """Removes a listener from the pool of listeners. + + Parameters + ----------- + func + The function that was used as a listener to remove. + name + The name of the event we want to remove. Defaults to + ``func.__name__``. + """ + + name = func.__name__ if name is None else name + + if name in self.extra_events: + try: + self.extra_events[name].remove(func) + except ValueError: + pass + + def listen(self, name=None): + """A decorator that registers another function as an external + event listener. Basically this allows you to listen to multiple + events from different places e.g. such as :func:`discord.on_ready` + + The functions being listened to must be a coroutine. + + Example + -------- + + .. 
code-block:: python + + @bot.listen() + async def on_message(message): + print('one') + + # in some other file... + + @bot.listen('on_message') + async def my_message(message): + print('two') + + Would print one and two in an unspecified order. + + Raises + ------- + discord.ClientException + The function being listened to is not a coroutine. + """ + + def decorator(func): + self.add_listener(func, name) + return func + + return decorator + + # cogs + + def add_cog(self, cog): + """Adds a "cog" to the bot. + + A cog is a class that has its own event listeners and commands. + + They are meant as a way to organize multiple relevant commands + into a singular class that shares some state or no state at all. + + The cog can also have a ``__check`` member function that allows + you to define a global check. See :meth:`check` for more info. + + More information will be documented soon. + + Parameters + ----------- + cog + The cog to register to the bot. + """ + + self.cogs[type(cog).__name__] = cog + + try: + check = getattr(cog, '_{.__class__.__name__}__check'.format(cog)) + except AttributeError: + pass + else: + self.add_check(check) + + members = inspect.getmembers(cog) + for name, member in members: + # register commands the cog has + if isinstance(member, Command): + if member.parent is None: + self.add_command(member) + continue + + # register event listeners the cog has + if name.startswith('on_'): + self.add_listener(member) + + def get_cog(self, name): + """Gets the cog instance requested. + + If the cog is not found, ``None`` is returned instead. + + Parameters + ----------- + name : str + The name of the cog you are requesting. + """ + return self.cogs.get(name) + + def remove_cog(self, name): + """Removes a cog from the bot. + + All registered commands and event listeners that the + cog has registered will be removed as well. + + If no cog is found then ``None`` is returned, otherwise + the cog instance that is being removed is returned. + + If the cog defines a special member function named ``__unload`` + then it is called when removal has completed. This function + **cannot** be a coroutine. It must be a regular function. + + Parameters + ----------- + name : str + The name of the cog to remove. 
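+
+        A loose sketch of the round trip (``Music`` is an assumed
+        user-defined cog, not part of this library):
+
+        .. code-block:: python
+
+            class Music:
+                def __unload(self):
+                    print('music cog unloaded')
+
+            bot.add_cog(Music())
+            bot.remove_cog('Music')  # __unload is called here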
+ """ + + cog = self.cogs.pop(name, None) + if cog is None: + return cog + + members = inspect.getmembers(cog) + for name, member in members: + # remove commands the cog has + if isinstance(member, Command): + if member.parent is None: + self.remove_command(member.name) + continue + + # remove event listeners the cog has + if name.startswith('on_'): + self.remove_listener(member) + + try: + check = getattr(cog, '_{0.__class__.__name__}__check'.format(cog)) + except AttributeError: + pass + else: + self.remove_check(check) + + unloader_name = '_{0.__class__.__name__}__unload'.format(cog) + try: + unloader = getattr(cog, unloader_name) + except AttributeError: + pass + else: + unloader() + + del cog + + # extensions + + def load_extension(self, name): + if name in self.extensions: + return + + lib = importlib.import_module(name) + if not hasattr(lib, 'setup'): + del lib + del sys.modules[name] + raise discord.ClientException('extension does not have a setup function') + + lib.setup(self) + self.extensions[name] = lib + + def unload_extension(self, name): + lib = self.extensions.get(name) + if lib is None: + return + + # find all references to the module + + # remove the cogs registered from the module + for cogname, cog in self.cogs.copy().items(): + if inspect.getmodule(cog) is lib: + self.remove_cog(cogname) + + # first remove all the commands from the module + for command in self.commands.copy().values(): + if command.module is lib: + command.module = None + if isinstance(command, GroupMixin): + command.recursively_remove_all_commands() + self.remove_command(command.name) + + # then remove all the listeners from the module + for event_list in self.extra_events.copy().values(): + remove = [] + for index, event in enumerate(event_list): + if inspect.getmodule(event) is lib: + remove.append(index) + + for index in reversed(remove): + del event_list[index] + + try: + func = getattr(lib, 'teardown') + except AttributeError: + pass + else: + try: + func(self) + except: + pass + finally: + # finally remove the import.. + del lib + del self.extensions[name] + del sys.modules[name] + + # command processing + + @asyncio.coroutine + def process_commands(self, message): + """|coro| + + This function processes the commands that have been registered + to the bot and other groups. Without this coroutine, none of the + commands will be triggered. + + By default, this coroutine is called inside the :func:`on_message` + event. If you choose to override the :func:`on_message` event, then + you should invoke this coroutine as well. + + Warning + -------- + This function is necessary for :meth:`say`, :meth:`whisper`, + :meth:`type`, :meth:`reply`, and :meth:`upload` to work due to the + way they are written. It is also required for the :func:`on_command` + and :func:`on_command_completion` events. + + Parameters + ----------- + message : discord.Message + The message to process commands for. 
+ """ + _internal_channel = message.channel + _internal_author = message.author + + view = StringView(message.content) + if self._skip_check(message.author, self.user): + return + + prefix = yield from self._get_prefix(message) + invoked_prefix = prefix + + if not isinstance(prefix, (tuple, list)): + if not view.skip_string(prefix): + return + else: + invoked_prefix = discord.utils.find(view.skip_string, prefix) + if invoked_prefix is None: + return + + + invoker = view.get_word() + tmp = { + 'bot': self, + 'invoked_with': invoker, + 'message': message, + 'view': view, + 'prefix': invoked_prefix + } + ctx = Context(**tmp) + del tmp + + if invoker in self.commands: + command = self.commands[invoker] + self.dispatch('command', command, ctx) + try: + yield from command.invoke(ctx) + except CommandError as e: + ctx.command.dispatch_error(e, ctx) + else: + self.dispatch('command_completion', command, ctx) + elif invoker: + exc = CommandNotFound('Command "{}" is not found'.format(invoker)) + self.dispatch('command_error', exc, ctx) + + @asyncio.coroutine + def on_message(self, message): + yield from self.process_commands(message) diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/context.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/context.py new file mode 100644 index 0000000..95996a6 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/context.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio + +class Context: + """Represents the context in which a command is being invoked under. + + This class contains a lot of meta data to help you understand more about + the invocation context. This class is not created manually and is instead + passed around to commands by passing in :attr:`Command.pass_context`. + + Attributes + ----------- + message : :class:`discord.Message` + The message that triggered the command being executed. + bot : :class:`Bot` + The bot that contains the command being executed. + args : list + The list of transformed arguments that were passed into the command. + If this is accessed during the :func:`on_command_error` event + then this list could be incomplete. + kwargs : dict + A dictionary of transformed arguments that were passed into the command. + Similar to :attr:`args`\, if this is accessed in the + :func:`on_command_error` event then this dict could be incomplete. + prefix : str + The prefix that was used to invoke the command. 
+ command + The command (i.e. :class:`Command` or its superclasses) that is being + invoked currently. + invoked_with : str + The command name that triggered this invocation. Useful for finding out + which alias called the command. + invoked_subcommand + The subcommand (i.e. :class:`Command` or its superclasses) that was + invoked. If no valid subcommand was invoked then this is equal to + `None`. + subcommand_passed : Optional[str] + The string that was attempted to call a subcommand. This does not have + to point to a valid registered subcommand and could just point to a + nonsense string. If nothing was passed to attempt a call to a + subcommand then this is set to `None`. + """ + + def __init__(self, **attrs): + self.message = attrs.pop('message', None) + self.bot = attrs.pop('bot', None) + self.args = attrs.pop('args', []) + self.kwargs = attrs.pop('kwargs', {}) + self.prefix = attrs.pop('prefix') + self.command = attrs.pop('command', None) + self.view = attrs.pop('view', None) + self.invoked_with = attrs.pop('invoked_with', None) + self.invoked_subcommand = attrs.pop('invoked_subcommand', None) + self.subcommand_passed = attrs.pop('subcommand_passed', None) + + @asyncio.coroutine + def invoke(self, command, *args, **kwargs): + """|coro| + + Calls a command with the arguments given. + + This is useful if you want to just call the callback that a + :class:`Command` holds internally. + + Note + ------ + You do not pass in the context as it is done for you. + + Parameters + ----------- + command : :class:`Command` + A command or superclass of a command that is going to be called. + \*args + The arguments to to use. + \*\*kwargs + The keyword arguments to use. + """ + + arguments = [] + if command.instance is not None: + arguments.append(command.instance) + + if command.pass_context: + arguments.append(self) + + arguments.extend(args) + + ret = yield from command.callback(*arguments, **kwargs) + return ret + + @property + def cog(self): + """Returns the cog associated with this context's command. None if it does not exist.""" + + if self.command is None: + return None + return self.command.instance diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/converter.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/converter.py new file mode 100644 index 0000000..04dae3e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/converter.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +import discord +import asyncio +import re +import inspect + +from .errors import BadArgument, NoPrivateMessage + +__all__ = [ 'Converter', 'MemberConverter', 'UserConverter', + 'ChannelConverter', 'InviteConverter', 'RoleConverter', + 'GameConverter', 'ColourConverter' ] + +def _get_from_servers(bot, getter, argument): + result = None + for server in bot.servers: + result = getattr(server, getter)(argument) + if result: + return result + return result + +class Converter: + """The base class of custom converters that require the :class:`Context` + to be passed to be useful. + + This allows you to implement converters that function similar to the + special cased ``discord`` classes. + + Classes that derive from this should override the :meth:`convert` method + to do its conversion logic. This method could be a coroutine or a regular + function. + + Attributes + ----------- + ctx: :class:`Context` + The invocation context that the argument is being used in. + argument: str + The argument that is being converted. + """ + def __init__(self, ctx, argument): + self.ctx = ctx + self.argument = argument + + def convert(self): + raise NotImplementedError('Derived classes need to implement this.') + +class IDConverter(Converter): + def __init__(self, ctx, argument): + super().__init__(ctx, argument) + self._id_regex = re.compile(r'([0-9]{15,21})$') + + def _get_id_match(self): + return self._id_regex.match(self.argument) + +class MemberConverter(IDConverter): + def convert(self): + message = self.ctx.message + bot = self.ctx.bot + match = self._get_id_match() or re.match(r'<@!?([0-9]+)>$', self.argument) + server = message.server + result = None + if match is None: + # not a mention... + if server: + result = server.get_member_named(self.argument) + else: + result = _get_from_servers(bot, 'get_member_named', self.argument) + else: + user_id = match.group(1) + if server: + result = server.get_member(user_id) + else: + result = _get_from_servers(bot, 'get_member', user_id) + + if result is None: + raise BadArgument('Member "{}" not found'.format(self.argument)) + + return result + +UserConverter = MemberConverter + +class ChannelConverter(IDConverter): + def convert(self): + message = self.ctx.message + bot = self.ctx.bot + + match = self._get_id_match() or re.match(r'<#([0-9]+)>$', self.argument) + result = None + server = message.server + if match is None: + # not a mention + if server: + result = discord.utils.get(server.channels, name=self.argument) + else: + result = discord.utils.get(bot.get_all_channels(), name=self.argument) + else: + channel_id = match.group(1) + if server: + result = server.get_channel(channel_id) + else: + result = _get_from_servers(bot, 'get_channel', channel_id) + + if result is None: + raise BadArgument('Channel "{}" not found.'.format(self.argument)) + + return result + +class ColourConverter(Converter): + def convert(self): + arg = self.argument.replace('0x', '').lower() + + if arg[0] == '#': + arg = arg[1:] + try: + value = int(arg, base=16) + return discord.Colour(value=value) + except ValueError: + method = getattr(discord.Colour, arg, None) + if method is None or not inspect.ismethod(method): + raise BadArgument('Colour "{}" is invalid.'.format(arg)) + return method() + +class RoleConverter(IDConverter): + def convert(self): + server = self.ctx.message.server + if not server: + raise NoPrivateMessage() + + match = self._get_id_match() or re.match(r'<@&([0-9]+)>$', self.argument) + params = dict(id=match.group(1)) if match else dict(name=self.argument) + 
result = discord.utils.get(server.roles, **params) + if result is None: + raise BadArgument('Role "{}" not found.'.format(self.argument)) + return result + +class GameConverter(Converter): + def convert(self): + return discord.Game(name=self.argument) + +class InviteConverter(Converter): + @asyncio.coroutine + def convert(self): + try: + invite = yield from self.ctx.bot.get_invite(self.argument) + return invite + except Exception as e: + raise BadArgument('Invite is invalid or expired') from e + +class EmojiConverter(IDConverter): + @asyncio.coroutine + def convert(self): + message = self.ctx.message + bot = self.ctx.bot + + match = self._get_id_match() or re.match(r'<:[a-zA-Z0-9]+:([0-9]+)>$', self.argument) + result = None + server = message.server + if match is None: + # Try to get the emoji by name. Try local server first. + if server: + result = discord.utils.get(server.emojis, name=self.argument) + + if result is None: + result = discord.utils.get(bot.get_all_emojis(), name=self.argument) + else: + emoji_id = match.group(1) + + # Try to look up emoji by id. + if server: + result = discord.utils.get(server.emojis, id=emoji_id) + + if result is None: + result = discord.utils.get(bot.get_all_emojis(), id=emoji_id) + + if result is None: + raise BadArgument('Emoji "{}" not found.'.format(self.argument)) + + return result diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/cooldowns.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/cooldowns.py new file mode 100644 index 0000000..c1aa10a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/cooldowns.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +import enum +import time + +__all__ = ['BucketType', 'Cooldown', 'CooldownMapping'] + +class BucketType(enum.Enum): + default = 0 + user = 1 + server = 2 + channel = 3 + +class Cooldown: + __slots__ = ['rate', 'per', 'type', '_window', '_tokens', '_last'] + + def __init__(self, rate, per, type): + self.rate = int(rate) + self.per = float(per) + self.type = type + self._window = 0.0 + self._tokens = self.rate + self._last = 0.0 + + if not isinstance(self.type, BucketType): + raise TypeError('Cooldown type must be a BucketType') + + def is_rate_limited(self): + current = time.time() + self._last = current + + # first token used means that we start a new rate limit window + if self._tokens == self.rate: + self._window = current + + # check if our window has passed and we can refresh our tokens + if current > self._window + self.per: + self._tokens = self.rate + self._window = current + + # check if we're rate limited + if self._tokens == 0: + return self.per - (current - self._window) + + # we're not so decrement our tokens + self._tokens -= 1 + + # see if we got rate limited due to this token change, and if + # so update the window to point to our current time frame + if self._tokens == 0: + self._window = current + + def reset(self): + self._tokens = self.rate + self._last = 0.0 + + def copy(self): + return Cooldown(self.rate, self.per, self.type) + + def __repr__(self): + return ''.format(self) + +class CooldownMapping: + def __init__(self, original): + self._cache = {} + self._cooldown = original + + @property + def valid(self): + return self._cooldown is not None + + def _bucket_key(self, ctx): + msg = ctx.message + bucket_type = self._cooldown.type + if bucket_type is BucketType.user: + return msg.author.id + elif bucket_type is BucketType.server: + return getattr(msg.server, 'id', msg.author.id) + elif bucket_type is BucketType.channel: + return msg.channel.id + + def _verify_cache_integrity(self): + # we want to delete all cache objects that haven't been used + # in a cooldown window. e.g. if we have a command that has a + # cooldown of 60s and it has not been used in 60s then that key should be deleted + current = time.time() + dead_keys = [k for k, v in self._cache.items() if current > v._last + v.per] + for k in dead_keys: + del self._cache[k] + + def get_bucket(self, ctx): + if self._cooldown.type is BucketType.default: + return self._cooldown + + self._verify_cache_integrity() + key = self._bucket_key(ctx) + if key not in self._cache: + bucket = self._cooldown.copy() + self._cache[key] = bucket + else: + bucket = self._cache[key] + + return bucket diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/core.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/core.py new file mode 100644 index 0000000..b04cdab --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/core.py @@ -0,0 +1,943 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import asyncio +import inspect +import discord +import functools + +from .errors import * +from .cooldowns import Cooldown, BucketType, CooldownMapping +from .view import quoted_word +from . import converter as converters + +__all__ = [ 'Command', 'Group', 'GroupMixin', 'command', 'group', + 'has_role', 'has_permissions', 'has_any_role', 'check', + 'bot_has_role', 'bot_has_permissions', 'bot_has_any_role', + 'cooldown' ] + +def inject_context(ctx, coro): + @functools.wraps(coro) + @asyncio.coroutine + def wrapped(*args, **kwargs): + _internal_channel = ctx.message.channel + _internal_author = ctx.message.author + + try: + ret = yield from coro(*args, **kwargs) + except CommandError: + raise + except Exception as e: + raise CommandInvokeError(e) from e + return ret + return wrapped + +def _convert_to_bool(argument): + lowered = argument.lower() + if lowered in ('yes', 'y', 'true', 't', '1', 'enable', 'on'): + return True + elif lowered in ('no', 'n', 'false', 'f', '0', 'disable', 'off'): + return False + else: + raise BadArgument(lowered + ' is not a recognised boolean option') + +class Command: + """A class that implements the protocol for a bot text command. + + These are not created manually, instead they are created via the + decorator or functional interface. + + Attributes + ----------- + name : str + The name of the command. + callback : coroutine + The coroutine that is executed when the command is called. + help : str + The long help text for the command. + brief : str + The short help text for the command. If this is not specified + then the first line of the long help text is used instead. + aliases : list + The list of aliases the command can be invoked under. + pass_context : bool + A boolean that indicates that the current :class:`Context` should + be passed as the **first parameter**. Defaults to `False`. + enabled : bool + A boolean that indicates if the command is currently enabled. + If the command is invoked while it is disabled, then + :exc:`DisabledCommand` is raised to the :func:`on_command_error` + event. Defaults to ``True``. + parent : Optional[command] + The parent command that this command belongs to. ``None`` is there + isn't one. + checks + A list of predicates that verifies if the command could be executed + with the given :class:`Context` as the sole parameter. If an exception + is necessary to be thrown to signal failure, then one derived from + :exc:`CommandError` should be used. Note that if the checks fail then + :exc:`CheckFailure` exception is raised to the :func:`on_command_error` + event. + description : str + The message prefixed into the default help command. + hidden : bool + If ``True``\, the default help command does not show this in the + help output. + no_pm : bool + If ``True``\, then the command is not allowed to be executed in + private messages. Defaults to ``False``. Note that if it is executed + in private messages, then :func:`on_command_error` and local error handlers + are called with the :exc:`NoPrivateMessage` error. 
+ rest_is_raw : bool + If ``False`` and a keyword-only argument is provided then the keyword + only argument is stripped and handled as if it was a regular argument + that handles :exc:`MissingRequiredArgument` and default values in a + regular matter rather than passing the rest completely raw. If ``True`` + then the keyword-only argument will pass in the rest of the arguments + in a completely raw matter. Defaults to ``False``. + ignore_extra : bool + If ``True``\, ignores extraneous strings passed to a command if all its + requirements are met (e.g. ``?foo a b c`` when only expecting ``a`` + and ``b``). Otherwise :func:`on_command_error` and local error handlers + are called with :exc:`TooManyArguments`. Defaults to ``True``. + """ + def __init__(self, name, callback, **kwargs): + self.name = name + if not isinstance(name, str): + raise TypeError('Name of a command must be a string.') + + self.callback = callback + self.enabled = kwargs.get('enabled', True) + self.help = kwargs.get('help') + self.brief = kwargs.get('brief') + self.rest_is_raw = kwargs.get('rest_is_raw', False) + self.aliases = kwargs.get('aliases', []) + self.pass_context = kwargs.get('pass_context', False) + self.description = inspect.cleandoc(kwargs.get('description', '')) + self.hidden = kwargs.get('hidden', False) + signature = inspect.signature(callback) + self.params = signature.parameters.copy() + self.checks = kwargs.get('checks', []) + self.module = inspect.getmodule(callback) + self.no_pm = kwargs.get('no_pm', False) + self.ignore_extra = kwargs.get('ignore_extra', True) + self.instance = None + self.parent = None + self._buckets = CooldownMapping(kwargs.get('cooldown')) + + def dispatch_error(self, error, ctx): + try: + coro = self.on_error + except AttributeError: + pass + else: + loop = ctx.bot.loop + injected = inject_context(ctx, coro) + if self.instance is not None: + discord.compat.create_task(injected(self.instance, error, ctx), loop=loop) + else: + discord.compat.create_task(injected(error, ctx), loop=loop) + finally: + ctx.bot.dispatch('command_error', error, ctx) + + def __get__(self, instance, owner): + if instance is not None: + self.instance = instance + return self + + @asyncio.coroutine + def do_conversion(self, ctx, converter, argument): + if converter is bool: + return _convert_to_bool(argument) + + if converter.__module__.startswith('discord.'): + converter = getattr(converters, converter.__name__ + 'Converter') + + if inspect.isclass(converter) and issubclass(converter, converters.Converter): + instance = converter(ctx, argument) + if asyncio.iscoroutinefunction(instance.convert): + return (yield from instance.convert()) + else: + return instance.convert() + + return converter(argument) + + def _get_converter(self, param): + converter = param.annotation + if converter is param.empty: + if param.default is not param.empty: + converter = str if param.default is None else type(param.default) + else: + converter = str + elif not inspect.isclass(type(converter)): + raise discord.ClientException('Function annotation must be a type') + + return converter + + @asyncio.coroutine + def transform(self, ctx, param): + required = param.default is param.empty + converter = self._get_converter(param) + consume_rest_is_special = param.kind == param.KEYWORD_ONLY and not self.rest_is_raw + view = ctx.view + view.skip_ws() + + if view.eof: + if param.kind == param.VAR_POSITIONAL: + raise RuntimeError() # break the loop + if required: + raise MissingRequiredArgument('{0.name} is a required argument that is 
missing.'.format(param)) + return param.default + + if consume_rest_is_special: + argument = view.read_rest().strip() + else: + argument = quoted_word(view) + + try: + return (yield from self.do_conversion(ctx, converter, argument)) + except CommandError as e: + raise e + except Exception as e: + raise BadArgument('Converting to "{0.__name__}" failed.'.format(converter)) from e + + @property + def clean_params(self): + """Retrieves the parameter OrderedDict without the context or self parameters. + + Useful for inspecting signature. + """ + result = self.params.copy() + if self.instance is not None: + # first parameter is self + result.popitem(last=False) + + if self.pass_context: + # first/second parameter is context + result.popitem(last=False) + + return result + + @property + def full_parent_name(self): + """Retrieves the fully qualified parent command name. + + This the base command name required to execute it. For example, + in ``?one two three`` the parent name would be ``one two``. + """ + entries = [] + command = self + while command.parent is not None: + command = command.parent + entries.append(command.name) + + return ' '.join(reversed(entries)) + + @property + def qualified_name(self): + """Retrieves the fully qualified command name. + + This is the full parent name with the command name as well. + For example, in ``?one two three`` the qualified name would be + ``one two three``. + """ + + parent = self.full_parent_name + if parent: + return parent + ' ' + self.name + else: + return self.name + + def __str__(self): + return self.qualified_name + + @asyncio.coroutine + def _parse_arguments(self, ctx): + ctx.args = [] if self.instance is None else [self.instance] + ctx.kwargs = {} + args = ctx.args + kwargs = ctx.kwargs + + first = True + view = ctx.view + iterator = iter(self.params.items()) + + if self.instance is not None: + # we have 'self' as the first parameter so just advance + # the iterator and resume parsing + try: + next(iterator) + except StopIteration: + fmt = 'Callback for {0.name} command is missing "self" parameter.' 
+ raise discord.ClientException(fmt.format(self)) + + for name, param in iterator: + if first and self.pass_context: + args.append(ctx) + first = False + continue + + if param.kind == param.POSITIONAL_OR_KEYWORD: + transformed = yield from self.transform(ctx, param) + args.append(transformed) + elif param.kind == param.KEYWORD_ONLY: + # kwarg only param denotes "consume rest" semantics + if self.rest_is_raw: + converter = self._get_converter(param) + argument = view.read_rest() + kwargs[name] = yield from self.do_conversion(ctx, converter, argument) + else: + kwargs[name] = yield from self.transform(ctx, param) + break + elif param.kind == param.VAR_POSITIONAL: + while not view.eof: + try: + transformed = yield from self.transform(ctx, param) + args.append(transformed) + except RuntimeError: + break + + if not self.ignore_extra: + if not view.eof: + raise TooManyArguments('Too many arguments passed to ' + self.qualified_name) + + + def _verify_checks(self, ctx): + if not self.enabled: + raise DisabledCommand('{0.name} command is disabled'.format(self)) + + if self.no_pm and ctx.message.channel.is_private: + raise NoPrivateMessage('This command cannot be used in private messages.') + + if not ctx.bot.can_run(ctx): + raise CheckFailure('The global check functions for command {0.qualified_name} failed.'.format(self)) + + if not self.can_run(ctx): + raise CheckFailure('The check functions for command {0.qualified_name} failed.'.format(self)) + + @asyncio.coroutine + def prepare(self, ctx): + ctx.command = self + self._verify_checks(ctx) + yield from self._parse_arguments(ctx) + + if self._buckets.valid: + bucket = self._buckets.get_bucket(ctx) + retry_after = bucket.is_rate_limited() + if retry_after: + raise CommandOnCooldown(bucket, retry_after) + + def reset_cooldown(self, ctx): + """Resets the cooldown on this command. + + Parameters + ----------- + ctx: :class:`Context` + The invocation context to reset the cooldown under. + """ + if self._buckets.valid: + bucket = self._buckets.get_bucket(ctx) + bucket.reset() + + @asyncio.coroutine + def invoke(self, ctx): + yield from self.prepare(ctx) + + # terminate the invoked_subcommand chain. + # since we're in a regular command (and not a group) then + # the invoked subcommand is None. + ctx.invoked_subcommand = None + injected = inject_context(ctx, self.callback) + yield from injected(*ctx.args, **ctx.kwargs) + + def error(self, coro): + """A decorator that registers a coroutine as a local error handler. + + A local error handler is an :func:`on_command_error` event limited to + a single command. However, the :func:`on_command_error` is still + invoked afterwards as the catch-all. + + Parameters + ----------- + coro + The coroutine to register as the local error handler. + + Raises + ------- + discord.ClientException + The coroutine is not actually a coroutine. + """ + + if not asyncio.iscoroutinefunction(coro): + raise discord.ClientException('The error handler must be a coroutine.') + + self.on_error = coro + return coro + + @property + def cog_name(self): + """The name of the cog this command belongs to. None otherwise.""" + return type(self.instance).__name__ if self.instance is not None else None + + @property + def short_doc(self): + """Gets the "short" documentation of a command. + + By default, this is the :attr:`brief` attribute. + If that lookup leads to an empty string then the first line of the + :attr:`help` attribute is used instead. 
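``transform`` and ``do_conversion`` above are what turn a parameter annotation into a converter lookup: ``discord.Member`` is swapped for ``MemberConverter``, ``bool`` goes through ``_convert_to_bool``, and un-annotated parameters fall back to ``str`` or the type of their default. A minimal sketch of a command relying on that (the command name is made up):

.. code-block:: python

    import discord
    from discord.ext import commands

    bot = commands.Bot(command_prefix='?')

    @bot.command(pass_context=True, no_pm=True)
    async def softban(ctx, member: discord.Member, days: int = 1, notify: bool = False):
        # member resolves via MemberConverter (mention, id or name),
        # days via int(), notify via _convert_to_bool ("yes"/"no"/"on"/"off", ...).
        # A failed conversion raises BadArgument to on_command_error.
        await bot.say('{0.name}: {1} day(s), notify={2}'.format(member, days, notify))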
+ """ + if self.brief: + return self.brief + if self.help: + return self.help.split('\n', 1)[0] + return '' + + def can_run(self, context): + """Checks if the command can be executed by checking all the predicates + inside the :attr:`checks` attribute. + + Parameters + ----------- + context : :class:`Context` + The context of the command currently being invoked. + + Returns + -------- + bool + A boolean indicating if the command can be invoked. + """ + + predicates = self.checks + if not predicates: + # since we have no checks, then we just return True. + return True + return all(predicate(context) for predicate in predicates) + +class GroupMixin: + """A mixin that implements common functionality for classes that behave + similar to :class:`Group` and are allowed to register commands. + + Attributes + ----------- + commands : dict + A mapping of command name to :class:`Command` or superclass + objects. + """ + def __init__(self, **kwargs): + self.commands = {} + super().__init__(**kwargs) + + def recursively_remove_all_commands(self): + for command in self.commands.copy().values(): + if isinstance(command, GroupMixin): + command.recursively_remove_all_commands() + self.remove_command(command.name) + + def add_command(self, command): + """Adds a :class:`Command` or its superclasses into the internal list + of commands. + + This is usually not called, instead the :meth:`command` or + :meth:`group` shortcut decorators are used instead. + + Parameters + ----------- + command + The command to add. + + Raises + ------- + discord.ClientException + If the command is already registered. + TypeError + If the command passed is not a subclass of :class:`Command`. + """ + + if not isinstance(command, Command): + raise TypeError('The command passed must be a subclass of Command') + + if isinstance(self, Command): + command.parent = self + + if command.name in self.commands: + raise discord.ClientException('Command {0.name} is already registered.'.format(command)) + + self.commands[command.name] = command + for alias in command.aliases: + if alias in self.commands: + raise discord.ClientException('The alias {} is already an existing command or alias.'.format(alias)) + self.commands[alias] = command + + def remove_command(self, name): + """Remove a :class:`Command` or subclasses from the internal list + of commands. + + This could also be used as a way to remove aliases. + + Parameters + ----------- + name : str + The name of the command to remove. + + Returns + -------- + Command or subclass + The command that was removed. If the name is not valid then + `None` is returned instead. + """ + command = self.commands.pop(name, None) + + # does not exist + if command is None: + return None + + if name in command.aliases: + # we're removing an alias so we don't want to remove the rest + return command + + # we're not removing the alias so let's delete the rest of them. + for alias in command.aliases: + self.commands.pop(alias, None) + return command + + def walk_commands(self): + """An iterator that recursively walks through all commands and subcommands.""" + for command in tuple(self.commands.values()): + yield command + if isinstance(command, GroupMixin): + yield from command.walk_commands() + + def get_command(self, name): + """Get a :class:`Command` or subclasses from the internal list + of commands. + + This could also be used as a way to get aliases. + + The name could be fully qualified (e.g. ``'foo bar'``) will get + the subcommand ``bar`` of the group command ``foo``. 
If a + subcommand is not found then ``None`` is returned just as usual. + + Parameters + ----------- + name: str + The name of the command to get. + + Returns + -------- + Command or subclass + The command that was requested. If not found, returns ``None``. + """ + + names = name.split() + obj = self.commands.get(names[0]) + if not isinstance(obj, GroupMixin): + return obj + + for name in names[1:]: + try: + obj = obj.commands[name] + except (AttributeError, KeyError): + return None + + return obj + + def command(self, *args, **kwargs): + """A shortcut decorator that invokes :func:`command` and adds it to + the internal command list via :meth:`add_command`. + """ + def decorator(func): + result = command(*args, **kwargs)(func) + self.add_command(result) + return result + + return decorator + + def group(self, *args, **kwargs): + """A shortcut decorator that invokes :func:`group` and adds it to + the internal command list via :meth:`add_command`. + """ + def decorator(func): + result = group(*args, **kwargs)(func) + self.add_command(result) + return result + + return decorator + +class Group(GroupMixin, Command): + """A class that implements a grouping protocol for commands to be + executed as subcommands. + + This class is a subclass of :class:`Command` and thus all options + valid in :class:`Command` are valid in here as well. + + Attributes + ----------- + invoke_without_command : bool + Indicates if the group callback should begin parsing and + invocation only if no subcommand was found. Useful for + making it an error handling function to tell the user that + no subcommand was found or to have different functionality + in case no subcommand was found. If this is ``False``, then + the group callback will always be invoked first. This means + that the checks and the parsing dictated by its parameters + will be executed. Defaults to ``False``. + """ + def __init__(self, **attrs): + self.invoke_without_command = attrs.pop('invoke_without_command', False) + super().__init__(**attrs) + + @asyncio.coroutine + def invoke(self, ctx): + early_invoke = not self.invoke_without_command + if early_invoke: + yield from self.prepare(ctx) + + view = ctx.view + previous = view.index + view.skip_ws() + trigger = view.get_word() + + if trigger: + ctx.subcommand_passed = trigger + ctx.invoked_subcommand = self.commands.get(trigger, None) + + if early_invoke: + injected = inject_context(ctx, self.callback) + yield from injected(*ctx.args, **ctx.kwargs) + + if trigger and ctx.invoked_subcommand: + ctx.invoked_with = trigger + yield from ctx.invoked_subcommand.invoke(ctx) + elif not early_invoke: + # undo the trigger parsing + view.index = previous + view.previous = previous + yield from super().invoke(ctx) + +# Decorators + +def command(name=None, cls=None, **attrs): + """A decorator that transforms a function into a :class:`Command` + or if called with :func:`group`, :class:`Group`. + + By default the ``help`` attribute is received automatically from the + docstring of the function and is cleaned up with the use of + ``inspect.cleandoc``. If the docstring is ``bytes``, then it is decoded + into ``str`` using utf-8 encoding. + + All checks added using the :func:`check` & co. decorators are added into + the function. There is no way to supply your own checks through this + decorator. + + Parameters + ----------- + name : str + The name to create the command with. By default this uses the + function name unchanged. + cls + The class to construct with. By default this is :class:`Command`. 
+ You usually do not change this. + attrs + Keyword arguments to pass into the construction of the class denoted + by ``cls``. + + Raises + ------- + TypeError + If the function is not a coroutine or is already a command. + """ + if cls is None: + cls = Command + + def decorator(func): + if isinstance(func, Command): + raise TypeError('Callback is already a command.') + if not asyncio.iscoroutinefunction(func): + raise TypeError('Callback must be a coroutine.') + + try: + checks = func.__commands_checks__ + checks.reverse() + del func.__commands_checks__ + except AttributeError: + checks = [] + + try: + cooldown = func.__commands_cooldown__ + del func.__commands_cooldown__ + except AttributeError: + cooldown = None + + help_doc = attrs.get('help') + if help_doc is not None: + help_doc = inspect.cleandoc(help_doc) + else: + help_doc = inspect.getdoc(func) + if isinstance(help_doc, bytes): + help_doc = help_doc.decode('utf-8') + + attrs['help'] = help_doc + fname = name or func.__name__ + return cls(name=fname, callback=func, checks=checks, cooldown=cooldown, **attrs) + + return decorator + +def group(name=None, **attrs): + """A decorator that transforms a function into a :class:`Group`. + + This is similar to the :func:`command` decorator but creates a + :class:`Group` instead of a :class:`Command`. + """ + return command(name=name, cls=Group, **attrs) + +def check(predicate): + """A decorator that adds a check to the :class:`Command` or its + subclasses. These checks could be accessed via :attr:`Command.checks`. + + These checks should be predicates that take in a single parameter taking + a :class:`Context`. If the check returns a ``False``\-like value then + during invocation a :exc:`CheckFailure` exception is raised and sent to + the :func:`on_command_error` event. + + If an exception should be thrown in the predicate then it should be a + subclass of :exc:`CommandError`. Any exception not subclassed from it + will be propagated while those subclassed will be sent to + :func:`on_command_error`. + + Parameters + ----------- + predicate + The predicate to check if the command should be invoked. + + Examples + --------- + + Creating a basic check to see if the command invoker is you. + + .. code-block:: python + + def check_if_it_is_me(ctx): + return ctx.message.author.id == 'my-user-id' + + @bot.command() + @commands.check(check_if_it_is_me) + async def only_for_me(): + await bot.say('I know you!') + + Transforming common checks into its own decorator: + + .. code-block:: python + + def is_me(): + def predicate(ctx): + return ctx.message.author.id == 'my-user-id' + return commands.check(predicate) + + @bot.command() + @is_me() + async def only_me(): + await bot.say('Only you!') + + """ + + def decorator(func): + if isinstance(func, Command): + func.checks.append(predicate) + else: + if not hasattr(func, '__commands_checks__'): + func.__commands_checks__ = [] + + func.__commands_checks__.append(predicate) + + return func + return decorator + +def has_role(name): + """A :func:`check` that is added that checks if the member invoking the + command has the role specified via the name specified. + + The name is case sensitive and must be exact. No normalisation is done in + the input. + + If the message is invoked in a private message context then the check will + return ``False``. + + Parameters + ----------- + name : str + The name of the role to check. 
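A short usage sketch in the style of the neighbouring checks; the command and role names here are made up:

.. code-block:: python

    @bot.command(pass_context=True, no_pm=True)
    @commands.has_role('Moderators')
    async def lockdown(ctx):
        # only members holding a role named exactly "Moderators" reach this
        await bot.say('Channel locked down.')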
+ """ + + def predicate(ctx): + msg = ctx.message + ch = msg.channel + if ch.is_private: + return False + + role = discord.utils.get(msg.author.roles, name=name) + return role is not None + + return check(predicate) + +def has_any_role(*names): + """A :func:`check` that is added that checks if the member invoking the + command has **any** of the roles specified. This means that if they have + one out of the three roles specified, then this check will return `True`. + + Similar to :func:`has_role`\, the names passed in must be exact. + + Parameters + ----------- + names + An argument list of names to check that the member has roles wise. + + Example + -------- + + .. code-block:: python + + @bot.command() + @commands.has_any_role('Library Devs', 'Moderators') + async def cool(): + await bot.say('You are cool indeed') + """ + def predicate(ctx): + msg = ctx.message + ch = msg.channel + if ch.is_private: + return False + + getter = functools.partial(discord.utils.get, msg.author.roles) + return any(getter(name=name) is not None for name in names) + return check(predicate) + +def has_permissions(**perms): + """A :func:`check` that is added that checks if the member has any of + the permissions necessary. + + The permissions passed in must be exactly like the properties shown under + :class:`discord.Permissions`. + + Parameters + ------------ + perms + An argument list of permissions to check for. + + Example + --------- + + .. code-block:: python + + @bot.command() + @commands.has_permissions(manage_messages=True) + async def test(): + await bot.say('You can manage messages.') + + """ + def predicate(ctx): + msg = ctx.message + ch = msg.channel + permissions = ch.permissions_for(msg.author) + return all(getattr(permissions, perm, None) == value for perm, value in perms.items()) + + return check(predicate) + +def bot_has_role(name): + """Similar to :func:`has_role` except checks if the bot itself has the + role. + """ + + def predicate(ctx): + ch = ctx.message.channel + if ch.is_private: + return False + me = ch.server.me + role = discord.utils.get(me.roles, name=name) + return role is not None + return check(predicate) + +def bot_has_any_role(*names): + """Similar to :func:`has_any_role` except checks if the bot itself has + any of the roles listed. + """ + def predicate(ctx): + ch = ctx.message.channel + if ch.is_private: + return False + me = ch.server.me + getter = functools.partial(discord.utils.get, me.roles) + return any(getter(name=name) is not None for name in names) + return check(predicate) + +def bot_has_permissions(**perms): + """Similar to :func:`has_permissions` except checks if the bot itself has + the permissions listed. + """ + def predicate(ctx): + ch = ctx.message.channel + me = ch.server.me if not ch.is_private else ctx.bot.user + permissions = ch.permissions_for(me) + return all(getattr(permissions, perm, None) == value for perm, value in perms.items()) + return check(predicate) + +def cooldown(rate, per, type=BucketType.default): + """A decorator that adds a cooldown to a :class:`Command` + or its subclasses. + + A cooldown allows a command to only be used a specific amount + of times in a specific time frame. These cooldowns can be based + either on a per-server, per-channel, per-user, or global basis. + Denoted by the third argument of ``type`` which must be of enum + type ``BucketType`` which could be either: + + - ``BucketType.default`` for a global basis. + - ``BucketType.user`` for a per-user basis. + - ``BucketType.server`` for a per-server basis. 
+ - ``BucketType.channel`` for a per-channel basis. + + If a cooldown is triggered, then :exc:`CommandOnCooldown` is triggered in + :func:`on_command_error` and the local error handler. + + A command can only have a single cooldown. + + Parameters + ------------ + rate: int + The number of times a command can be used before triggering a cooldown. + per: float + The amount of seconds to wait for a cooldown when it's been triggered. + type: ``BucketType`` + The type of cooldown to have. + """ + + def decorator(func): + if isinstance(func, Command): + func._buckets = CooldownMapping(Cooldown(rate, per, type)) + else: + func.__commands_cooldown__ = Cooldown(rate, per, type) + return func + return decorator diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/errors.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/errors.py new file mode 100644 index 0000000..b42b87e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/errors.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from discord.errors import DiscordException + + +__all__ = [ 'CommandError', 'MissingRequiredArgument', 'BadArgument', + 'NoPrivateMessage', 'CheckFailure', 'CommandNotFound', + 'DisabledCommand', 'CommandInvokeError', 'TooManyArguments', + 'UserInputError', 'CommandOnCooldown' ] + +class CommandError(DiscordException): + """The base exception type for all command related errors. + + This inherits from :exc:`discord.DiscordException`. + + This exception and exceptions derived from it are handled + in a special way as they are caught and passed into a special event + from :class:`Bot`\, :func:`on_command_error`. + """ + def __init__(self, message=None, *args): + if message is not None: + # clean-up @everyone and @here mentions + m = message.replace('@everyone', '@\u200beveryone').replace('@here', '@\u200bhere') + super().__init__(m, *args) + else: + super().__init__(*args) + +class UserInputError(CommandError): + """The base exception type for errors that involve errors + regarding user input. + + This inherits from :exc:`CommandError`. + """ + pass + +class CommandNotFound(CommandError): + """Exception raised when a command is attempted to be invoked + but no command under that name is found. + + This is not raised for invalid subcommands, rather just the + initial main command that is attempted to be invoked. 
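The :func:`cooldown` decorator described above attaches a single ``Cooldown`` to the command (via ``__commands_cooldown__`` when applied before the command decorator). A minimal sketch of applying it, assuming a ``bot`` created with ``commands.Bot`` (the numbers and the command name are arbitrary):

.. code-block:: python

    from discord.ext import commands
    from discord.ext.commands.cooldowns import BucketType

    @bot.command(pass_context=True)
    @commands.cooldown(rate=1, per=30.0, type=BucketType.user)
    async def daily(ctx):
        # each user may run this once every 30 seconds;
        # a second attempt raises CommandOnCooldown to on_command_error
        await bot.say('Here is your reward.')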
+ """ + pass + +class MissingRequiredArgument(UserInputError): + """Exception raised when parsing a command and a parameter + that is required is not encountered. + """ + pass + +class TooManyArguments(UserInputError): + """Exception raised when the command was passed too many arguments and its + :attr:`Command.ignore_extra` attribute was not set to ``True``. + """ + pass + +class BadArgument(UserInputError): + """Exception raised when a parsing or conversion failure is encountered + on an argument to pass into a command. + """ + pass + +class NoPrivateMessage(CommandError): + """Exception raised when an operation does not work in private message + contexts. + """ + pass + +class CheckFailure(CommandError): + """Exception raised when the predicates in :attr:`Command.checks` have failed.""" + pass + +class DisabledCommand(CommandError): + """Exception raised when the command being invoked is disabled.""" + pass + +class CommandInvokeError(CommandError): + """Exception raised when the command being invoked raised an exception. + + Attributes + ----------- + original + The original exception that was raised. You can also get this via + the ``__cause__`` attribute. + """ + def __init__(self, e): + self.original = e + super().__init__('Command raised an exception: {0.__class__.__name__}: {0}'.format(e)) + +class CommandOnCooldown(CommandError): + """Exception raised when the command being invoked is on cooldown. + + Attributes + ----------- + cooldown: Cooldown + A class with attributes ``rate``, ``per``, and ``type`` similar to + the :func:`cooldown` decorator. + retry_after: float + The amount of seconds to wait before you can retry again. + """ + def __init__(self, cooldown, retry_after): + self.cooldown = cooldown + self.retry_after = retry_after + super().__init__('You are on cooldown. Try again in {:.2f}s'.format(retry_after)) diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/formatter.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/formatter.py new file mode 100644 index 0000000..0b12df5 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/formatter.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +import itertools +import inspect + +from .core import GroupMixin, Command +from .errors import CommandError + +# help -> shows info of bot on top/bottom and lists subcommands +# help command -> shows detailed info of command +# help command -> same as above + +# + +# + +# + +# Cog: +# +# +# Other Cog: +# +# No Category: +# + +# Type help command for more info on a command. +# You can also type help category for more info on a category. + +class Paginator: + """A class that aids in paginating code blocks for Discord messages. + + Attributes + ----------- + prefix: str + The prefix inserted to every page. e.g. three backticks. + suffix: str + The suffix appended at the end of every page. e.g. three backticks. + max_size: int + The maximum amount of codepoints allowed in a page. + """ + def __init__(self, prefix='```', suffix='```', max_size=2000): + self.prefix = prefix + self.suffix = suffix + self.max_size = max_size - len(suffix) + self._current_page = [prefix] + self._count = len(prefix) + 1 # prefix + newline + self._pages = [] + + def add_line(self, line='', *, empty=False): + """Adds a line to the current page. + + If the line exceeds the :attr:`max_size` then an exception + is raised. + + Parameters + ----------- + line: str + The line to add. + empty: bool + Indicates if another empty line should be added. + + Raises + ------ + RuntimeError + The line was too big for the current :attr:`max_size`. + """ + if len(line) > self.max_size - len(self.prefix) - 2: + raise RuntimeError('Line exceeds maximum page size %s' % (self.max_size - len(self.prefix) - 2)) + + if self._count + len(line) + 1 > self.max_size: + self.close_page() + + self._count += len(line) + 1 + self._current_page.append(line) + + if empty: + self._current_page.append('') + self._count += 1 + + def close_page(self): + """Prematurely terminate a page.""" + self._current_page.append(self.suffix) + self._pages.append('\n'.join(self._current_page)) + self._current_page = [self.prefix] + self._count = len(self.prefix) + 1 # prefix + newline + + @property + def pages(self): + """Returns the rendered list of pages.""" + # we have more than just the prefix in our current page + if len(self._current_page) > 1: + self.close_page() + return self._pages + + def __repr__(self): + fmt = '' + return fmt.format(self) + +class HelpFormatter: + """The default base implementation that handles formatting of the help + command. + + To override the behaviour of the formatter, :meth:`format` + should be overridden. A number of utility functions are provided for use + inside that method. + + Parameters + ----------- + show_hidden : bool + Dictates if hidden commands should be shown in the output. + Defaults to ``False``. + show_check_failure : bool + Dictates if commands that have their :attr:`Command.checks` failed + shown. Defaults to ``False``. + width : int + The maximum number of characters that fit in a line. + Defaults to 80. 
+ """ + def __init__(self, show_hidden=False, show_check_failure=False, width=80): + self.width = width + self.show_hidden = show_hidden + self.show_check_failure = show_check_failure + + def has_subcommands(self): + """bool : Specifies if the command has subcommands.""" + return isinstance(self.command, GroupMixin) + + def is_bot(self): + """bool : Specifies if the command being formatted is the bot itself.""" + return self.command is self.context.bot + + def is_cog(self): + """bool : Specifies if the command being formatted is actually a cog.""" + return not self.is_bot() and not isinstance(self.command, Command) + + def shorten(self, text): + """Shortens text to fit into the :attr:`width`.""" + if len(text) > self.width: + return text[:self.width - 3] + '...' + return text + + @property + def max_name_size(self): + """int : Returns the largest name length of a command or if it has subcommands + the largest subcommand name.""" + try: + commands = self.command.commands if not self.is_cog() else self.context.bot.commands + if commands: + return max(map(lambda c: len(c.name) if self.show_hidden or not c.hidden else 0, commands.values())) + return 0 + except AttributeError: + return len(self.command.name) + + @property + def clean_prefix(self): + """The cleaned up invoke prefix. i.e. mentions are ``@name`` instead of ``<@id>``.""" + user = self.context.bot.user + # this breaks if the prefix mention is not the bot itself but I + # consider this to be an *incredibly* strange use case. I'd rather go + # for this common use case rather than waste performance for the + # odd one. + return self.context.prefix.replace(user.mention, '@' + user.name) + + def get_command_signature(self): + """Retrieves the signature portion of the help page.""" + result = [] + prefix = self.clean_prefix + cmd = self.command + parent = cmd.full_parent_name + if len(cmd.aliases) > 0: + aliases = '|'.join(cmd.aliases) + fmt = '{0}[{1.name}|{2}]' + if parent: + fmt = '{0}{3} [{1.name}|{2}]' + result.append(fmt.format(prefix, cmd, aliases, parent)) + else: + name = prefix + cmd.name if not parent else prefix + parent + ' ' + cmd.name + result.append(name) + + params = cmd.clean_params + if len(params) > 0: + for name, param in params.items(): + if param.default is not param.empty: + # We don't want None or '' to trigger the [name=value] case and instead it should + # do [name] since [name=None] or [name=] are not exactly useful for the user. + should_print = param.default if isinstance(param.default, str) else param.default is not None + if should_print: + result.append('[{}={}]'.format(name, param.default)) + else: + result.append('[{}]'.format(name)) + elif param.kind == param.VAR_POSITIONAL: + result.append('[{}...]'.format(name)) + else: + result.append('<{}>'.format(name)) + + return ' '.join(result) + + def get_ending_note(self): + command_name = self.context.invoked_with + return "Type {0}{1} command for more info on a command.\n" \ + "You can also type {0}{1} category for more info on a category.".format(self.clean_prefix, command_name) + + def filter_command_list(self): + """Returns a filtered list of commands based on the two attributes + provided, :attr:`show_check_failure` and :attr:`show_hidden`. Also + filters based on if :meth:`is_cog` is valid. + + Returns + -------- + iterable + An iterable with the filter being applied. The resulting value is + a (key, value) tuple of the command name and the command itself. 
+ """ + def predicate(tuple): + cmd = tuple[1] + if self.is_cog(): + # filter commands that don't exist to this cog. + if cmd.instance is not self.command: + return False + + if cmd.hidden and not self.show_hidden: + return False + + if self.show_check_failure: + # we don't wanna bother doing the checks if the user does not + # care about them, so just return true. + return True + + try: + return cmd.can_run(self.context) and self.context.bot.can_run(self.context) + except CommandError: + return False + + iterator = self.command.commands.items() if not self.is_cog() else self.context.bot.commands.items() + return filter(predicate, iterator) + + def _add_subcommands_to_page(self, max_width, commands): + for name, command in commands: + if name in command.aliases: + # skip aliases + continue + + entry = ' {0:<{width}} {1}'.format(name, command.short_doc, width=max_width) + shortened = self.shorten(entry) + self._paginator.add_line(shortened) + + def format_help_for(self, context, command_or_bot): + """Formats the help page and handles the actual heavy lifting of how + the help command looks like. To change the behaviour, override the + :meth:`format` method. + + Parameters + ----------- + context : :class:`Context` + The context of the invoked help command. + command_or_bot : :class:`Command` or :class:`Bot` + The bot or command that we are getting the help of. + + Returns + -------- + list + A paginated output of the help command. + """ + self.context = context + self.command = command_or_bot + return self.format() + + def format(self): + """Handles the actual behaviour involved with formatting. + + To change the behaviour, this method should be overridden. + + Returns + -------- + list + A paginated output of the help command. + """ + self._paginator = Paginator() + + # we need a padding of ~80 or so + + description = self.command.description if not self.is_cog() else inspect.getdoc(self.command) + + if description: + # portion + self._paginator.add_line(description, empty=True) + + if isinstance(self.command, Command): + # + signature = self.get_command_signature() + self._paginator.add_line(signature, empty=True) + + # section + if self.command.help: + self._paginator.add_line(self.command.help, empty=True) + + # end it here if it's just a regular command + if not self.has_subcommands(): + self._paginator.close_page() + return self._paginator.pages + + max_width = self.max_name_size + + def category(tup): + cog = tup[1].cog_name + # we insert the zero width space there to give it approximate + # last place sorting position. + return cog + ':' if cog is not None else '\u200bNo Category:' + + if self.is_bot(): + data = sorted(self.filter_command_list(), key=category) + for category, commands in itertools.groupby(data, key=category): + # there simply is no prettier way of doing this. 
+ commands = list(commands) + if len(commands) > 0: + self._paginator.add_line(category) + + self._add_subcommands_to_page(max_width, commands) + else: + self._paginator.add_line('Commands:') + self._add_subcommands_to_page(max_width, self.filter_command_list()) + + # add the ending note + self._paginator.add_line() + ending_note = self.get_ending_note() + self._paginator.add_line(ending_note) + return self._paginator.pages diff --git a/RBXLegacyDiscordBot/lib/discord/ext/commands/view.py b/RBXLegacyDiscordBot/lib/discord/ext/commands/view.py new file mode 100644 index 0000000..c1a19ba --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/ext/commands/view.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
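The formatter is normally supplied when the bot is constructed; assuming the ``formatter`` keyword argument of ``commands.Bot`` behaves as in upstream discord.py, wiring a custom instance in looks roughly like:

.. code-block:: python

    from discord.ext import commands
    from discord.ext.commands.formatter import HelpFormatter

    # show_check_failure=True lists commands even when their checks would fail
    formatter = HelpFormatter(show_hidden=True, show_check_failure=True, width=70)
    bot = commands.Bot(command_prefix='?', formatter=formatter)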
+""" + +from .errors import BadArgument + +class StringView: + def __init__(self, buffer): + self.index = 0 + self.buffer = buffer + self.end = len(buffer) + self.previous = 0 + + @property + def current(self): + return None if self.eof else self.buffer[self.index] + + @property + def eof(self): + return self.index >= self.end + + def undo(self): + self.index = self.previous + + def skip_ws(self): + pos = 0 + while not self.eof: + try: + current = self.buffer[self.index + pos] + if not current.isspace(): + break + pos += 1 + except IndexError: + break + + self.previous = self.index + self.index += pos + return self.previous != self.index + + def skip_string(self, string): + strlen = len(string) + if self.buffer[self.index:self.index + strlen] == string: + self.previous = self.index + self.index += strlen + return True + return False + + def read_rest(self): + result = self.buffer[self.index:] + self.previous = self.index + self.index = self.end + return result + + def read(self, n): + result = self.buffer[self.index:self.index + n] + self.previous = self.index + self.index += n + return result + + def get(self): + try: + result = self.buffer[self.index + 1] + except IndexError: + result = None + + self.previous = self.index + self.index += 1 + return result + + def get_word(self): + pos = 0 + while not self.eof: + try: + current = self.buffer[self.index + pos] + if current.isspace(): + break + pos += 1 + except IndexError: + break + self.previous = self.index + result = self.buffer[self.index:self.index + pos] + self.index += pos + return result + + def __repr__(self): + return ''.format(self) + +# Parser + +def quoted_word(view): + current = view.current + + if current is None: + return None + + is_quoted = current == '"' + result = [] if is_quoted else [current] + + while not view.eof: + current = view.get() + if not current: + if is_quoted: + # unexpected EOF + raise BadArgument('Expected closing "') + return ''.join(result) + + # currently we accept strings in the format of "hello world" + # to embed a quote inside the string you must escape it: "a \"world\"" + if current == '\\': + next_char = view.get() + if not next_char: + # string ends with \ and no character after it + if is_quoted: + # if we're quoted then we're expecting a closing quote + raise BadArgument('Expected closing "') + # if we aren't then we just let it through + return ''.join(result) + + if next_char == '"': + # escaped quote + result.append('"') + else: + # different escape character, ignore it + view.undo() + result.append(current) + continue + + # closing quote + if current == '"': + next_char = view.get() + valid_eof = not next_char or next_char.isspace() + if is_quoted: + if not valid_eof: + raise BadArgument('Expected space after closing quotation') + + # we're quoted so it's okay + return ''.join(result) + else: + # we aren't quoted + raise BadArgument('Unexpected quote mark in non-quoted string') + + if current.isspace() and not is_quoted: + # end of word found + return ''.join(result) + + result.append(current) diff --git a/RBXLegacyDiscordBot/lib/discord/game.py b/RBXLegacyDiscordBot/lib/discord/game.py new file mode 100644 index 0000000..49e5a4f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/game.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, 
including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +class Game: + """Represents a Discord game. + + Supported Operations: + + +-----------+------------------------------------+ + | Operation | Description | + +===========+====================================+ + | x == y | Checks if two games are equal. | + +-----------+------------------------------------+ + | x != y | Checks if two games are not equal. | + +-----------+------------------------------------+ + | hash(x) | Return the games's hash. | + +-----------+------------------------------------+ + | str(x) | Returns the games's name. | + +-----------+------------------------------------+ + + Attributes + ----------- + name : str + The game's name. + url : str + The game's URL. Usually used for twitch streaming. + type : int + The type of game being played. 1 indicates "Streaming". + """ + + __slots__ = ['name', 'type', 'url'] + + def __init__(self, **kwargs): + self.name = kwargs.get('name') + self.url = kwargs.get('url') + self.type = kwargs.get('type') + + def __str__(self): + return self.name + + def _iterator(self): + for attr in self.__slots__: + value = getattr(self, attr, None) + if value is not None: + yield (attr, value) + + def __iter__(self): + return self._iterator() + + def __eq__(self, other): + return isinstance(other, Game) and other.name == self.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self.name) diff --git a/RBXLegacyDiscordBot/lib/discord/gateway.py b/RBXLegacyDiscordBot/lib/discord/gateway.py new file mode 100644 index 0000000..da02d11 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/gateway.py @@ -0,0 +1,688 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
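``Game`` is mostly used through ``Client.change_presence``; a small sketch where the game name and stream URL are placeholders, with ``type=1`` marking the presence as streaming:

.. code-block:: python

    import discord

    client = discord.Client()

    @client.event
    async def on_ready():
        playing = discord.Game(name='RBXLegacy')
        await client.change_presence(game=playing)

        # a "Streaming" presence pairs type=1 with a stream URL
        streaming = discord.Game(name='RBXLegacy', url='https://twitch.tv/example', type=1)
        await client.change_presence(game=streaming)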
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import sys +import time +import websockets +import asyncio +import aiohttp +from . import utils, compat +from .enums import Status, try_enum +from .game import Game +from .errors import GatewayNotFound, ConnectionClosed, InvalidArgument +import logging +import zlib, time, json +from collections import namedtuple +import threading +import struct + +log = logging.getLogger(__name__) + +__all__ = [ 'ReconnectWebSocket', 'DiscordWebSocket', + 'KeepAliveHandler', 'VoiceKeepAliveHandler', + 'DiscordVoiceWebSocket', 'ResumeWebSocket' ] + +class ReconnectWebSocket(Exception): + """Signals to handle the RECONNECT opcode.""" + pass + +class ResumeWebSocket(Exception): + """Signals to initialise via RESUME opcode instead of IDENTIFY.""" + pass + +EventListener = namedtuple('EventListener', 'predicate event result future') + +@asyncio.coroutine +def _ensure_coroutine_connect(gateway, *, loop, klass): + # In 3.5+ websockets.connect does not return a coroutine, but an awaitable. + # The problem is that in 3.5.0 and in some cases 3.5.1, asyncio.ensure_future and + # by proxy, asyncio.wait_for, do not accept awaitables, but rather futures or coroutines. + # By wrapping it up into this function we ensure that it's in a coroutine and not an awaitable + # even for 3.5.0 users. + ws = yield from websockets.connect(gateway, loop=loop, klass=klass) + return ws + +class KeepAliveHandler(threading.Thread): + def __init__(self, *args, **kwargs): + ws = kwargs.pop('ws', None) + interval = kwargs.pop('interval', None) + threading.Thread.__init__(self, *args, **kwargs) + self.ws = ws + self.interval = interval + self.daemon = True + self.msg = 'Keeping websocket alive with sequence {0[d]}' + self._stop_ev = threading.Event() + self._last_ack = time.time() + + def run(self): + while not self._stop_ev.wait(self.interval): + if self._last_ack + 2 * self.interval < time.time(): + log.warn("We have stopped responding to the gateway.") + coro = self.ws.close(1001) + f = compat.run_coroutine_threadsafe(coro, loop=self.ws.loop) + + try: + f.result() + except: + pass + finally: + self.stop() + return + + data = self.get_payload() + log.debug(self.msg.format(data)) + coro = self.ws.send_as_json(data) + f = compat.run_coroutine_threadsafe(coro, loop=self.ws.loop) + try: + # block until sending is complete + f.result() + except Exception: + self.stop() + + def get_payload(self): + return { + 'op': self.ws.HEARTBEAT, + 'd': self.ws._connection.sequence + } + + def stop(self): + self._stop_ev.set() + + def ack(self): + self._last_ack = time.time() + +class VoiceKeepAliveHandler(KeepAliveHandler): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.msg = 'Keeping voice websocket alive with timestamp {0[d]}' + + def get_payload(self): + self.ack() + return { + 'op': self.ws.HEARTBEAT, + 'd': int(time.time() * 1000) + } + +class DiscordWebSocket(websockets.client.WebSocketClientProtocol): + """Implements a WebSocket for Discord's gateway v6. + + This is created through :func:`create_main_websocket`. Library + users should never create this manually. + + Attributes + ----------- + DISPATCH + Receive only. Denotes an event to be sent to Discord, such as READY. + HEARTBEAT + When received tells Discord to keep the connection alive. 
+ When sent asks if your connection is currently alive. + IDENTIFY + Send only. Starts a new session. + PRESENCE + Send only. Updates your presence. + VOICE_STATE + Send only. Starts a new connection to a voice server. + VOICE_PING + Send only. Checks ping time to a voice server, do not use. + RESUME + Send only. Resumes an existing connection. + RECONNECT + Receive only. Tells the client to reconnect to a new gateway. + REQUEST_MEMBERS + Send only. Asks for the full member list of a server. + INVALIDATE_SESSION + Receive only. Tells the client to invalidate the session and IDENTIFY + again. + HELLO + Receive only. Tells the client the heartbeat interval. + HEARTBEAT_ACK + Receive only. Confirms receiving of a heartbeat. Not having it implies + a connection issue. + GUILD_SYNC + Send only. Requests a guild sync. + gateway + The gateway we are currently connected to. + token + The authentication token for discord. + """ + + DISPATCH = 0 + HEARTBEAT = 1 + IDENTIFY = 2 + PRESENCE = 3 + VOICE_STATE = 4 + VOICE_PING = 5 + RESUME = 6 + RECONNECT = 7 + REQUEST_MEMBERS = 8 + INVALIDATE_SESSION = 9 + HELLO = 10 + HEARTBEAT_ACK = 11 + GUILD_SYNC = 12 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.max_size = None + # an empty dispatcher to prevent crashes + self._dispatch = lambda *args: None + # generic event listeners + self._dispatch_listeners = [] + # the keep alive + self._keep_alive = None + + @classmethod + @asyncio.coroutine + def from_client(cls, client, *, resume=False): + """Creates a main websocket for Discord from a :class:`Client`. + + This is for internal use only. + """ + gateway = yield from client.http.get_gateway() + try: + ws = yield from asyncio.wait_for( + _ensure_coroutine_connect(gateway, loop=client.loop, klass=cls), + timeout=60, loop=client.loop) + except asyncio.TimeoutError: + log.warn('timed out waiting for client connect') + return (yield from cls.from_client(client, resume=resume)) + + # dynamically add attributes needed + ws.token = client.http.token + ws._connection = client.connection + ws._dispatch = client.dispatch + ws.gateway = gateway + ws.shard_id = client.shard_id + ws.shard_count = client.shard_count + + client.connection._update_references(ws) + + log.info('Created websocket connected to {}'.format(gateway)) + + # poll event for OP Hello + try: + yield from asyncio.wait_for(ws.poll_event(), timeout=60, loop=client.loop) + except asyncio.TimeoutError: + log.warn("timed out waiting for client HELLO") + yield from ws.close(1001) + return (yield from cls.from_client(client, resume=resume)) + + if not resume: + yield from ws.identify() + log.info('sent the identify payload to create the websocket') + return ws + + yield from ws.resume() + log.info('sent the resume payload to create the websocket') + try: + yield from ws.ensure_open() + except websockets.exceptions.ConnectionClosed: + # ws got closed so let's just do a regular IDENTIFY connect. + log.warn('RESUME failure.') + return (yield from cls.from_client(client)) + else: + return ws + + def wait_for(self, event, predicate, result=None): + """Waits for a DISPATCH'd event that meets the predicate. + + Parameters + ----------- + event : str + The event name in all upper case to wait for. + predicate + A function that takes a data parameter to check for event + properties. The data parameter is the 'd' key in the JSON message. + result + A function that takes the same data parameter and executes to send + the result to the future. If None, returns the data. 
+ + Returns + -------- + asyncio.Future + A future to wait for. + """ + + future = asyncio.Future(loop=self.loop) + entry = EventListener(event=event, predicate=predicate, result=result, future=future) + self._dispatch_listeners.append(entry) + return future + + @asyncio.coroutine + def identify(self): + """Sends the IDENTIFY packet.""" + payload = { + 'op': self.IDENTIFY, + 'd': { + 'token': self.token, + 'properties': { + '$os': sys.platform, + '$browser': 'discord.py', + '$device': 'discord.py', + '$referrer': '', + '$referring_domain': '' + }, + 'compress': True, + 'large_threshold': 250, + 'v': 3 + } + } + + if not self._connection.is_bot: + payload['d']['synced_guilds'] = [] + + if self.shard_id is not None and self.shard_count is not None: + payload['d']['shard'] = [self.shard_id, self.shard_count] + + yield from self.send_as_json(payload) + + @asyncio.coroutine + def resume(self): + """Sends the RESUME packet.""" + state = self._connection + payload = { + 'op': self.RESUME, + 'd': { + 'seq': state.sequence, + 'session_id': state.session_id, + 'token': self.token + } + } + + yield from self.send_as_json(payload) + + @asyncio.coroutine + def received_message(self, msg): + self._dispatch('socket_raw_receive', msg) + + if isinstance(msg, bytes): + msg = zlib.decompress(msg, 15, 10490000) # This is 10 MiB + msg = msg.decode('utf-8') + + msg = json.loads(msg) + state = self._connection + + log.debug('WebSocket Event: {}'.format(msg)) + self._dispatch('socket_response', msg) + + op = msg.get('op') + data = msg.get('d') + seq = msg.get('s') + if seq is not None: + state.sequence = seq + + if op == self.RECONNECT: + # "reconnect" can only be handled by the Client + # so we terminate our connection and raise an + # internal exception signalling to reconnect. 
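+                # Sketch of the intended flow (the Client-side handling lives
+                # outside this file): the connect loop presumably catches
+                # ReconnectWebSocket and opens a brand-new gateway connection,
+                # while ResumeWebSocket (see its docstring above, and poll_event
+                # below) is the analogous signal for attempting a RESUME instead
+                # of a full IDENTIFY.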
+ log.info('Received RECONNECT opcode.') + yield from self.close() + raise ReconnectWebSocket() + + if op == self.HEARTBEAT_ACK: + self._keep_alive.ack() + return + + if op == self.HEARTBEAT: + beat = self._keep_alive.get_payload() + yield from self.send_as_json(beat) + return + + if op == self.HELLO: + interval = data['heartbeat_interval'] / 1000.0 + self._keep_alive = KeepAliveHandler(ws=self, interval=interval) + self._keep_alive.start() + return + + if op == self.INVALIDATE_SESSION: + if data == True: + yield from asyncio.sleep(5.0, loop=self.loop) + yield from self.close() + raise ResumeWebSocket() + + state.sequence = None + state.session_id = None + + yield from self.identify() + return + + if op != self.DISPATCH: + log.info('Unhandled op {}'.format(op)) + return + + event = msg.get('t') + is_ready = event == 'READY' + + if is_ready: + state.clear() + state.sequence = msg['s'] + state.session_id = data['session_id'] + + parser = 'parse_' + event.lower() + + try: + func = getattr(self._connection, parser) + except AttributeError: + log.info('Unhandled event {}'.format(event)) + else: + func(data) + + # remove the dispatched listeners + removed = [] + for index, entry in enumerate(self._dispatch_listeners): + if entry.event != event: + continue + + future = entry.future + if future.cancelled(): + removed.append(index) + continue + + try: + valid = entry.predicate(data) + except Exception as e: + future.set_exception(e) + removed.append(index) + else: + if valid: + ret = data if entry.result is None else entry.result(data) + future.set_result(ret) + removed.append(index) + + for index in reversed(removed): + del self._dispatch_listeners[index] + + def _can_handle_close(self, code): + return code not in (1000, 4004, 4010, 4011) + + @asyncio.coroutine + def poll_event(self): + """Polls for a DISPATCH event and handles the general gateway loop. + + Raises + ------ + ConnectionClosed + The websocket connection was terminated for unhandled reasons. 
+ """ + try: + msg = yield from self.recv() + yield from self.received_message(msg) + except websockets.exceptions.ConnectionClosed as e: + if self._can_handle_close(e.code): + log.info('Websocket closed with {0.code} ({0.reason}), attempting a reconnect.'.format(e)) + raise ResumeWebSocket() from e + else: + raise ConnectionClosed(e) from e + + @asyncio.coroutine + def send(self, data): + self._dispatch('socket_raw_send', data) + yield from super().send(data) + + @asyncio.coroutine + def send_as_json(self, data): + try: + yield from super().send(utils.to_json(data)) + except websockets.exceptions.ConnectionClosed as e: + if not self._can_handle_close(e.code): + raise ConnectionClosed(e) from e + + @asyncio.coroutine + def change_presence(self, *, game=None, status=None, afk=False, since=0.0, idle=None): + if game is not None and not isinstance(game, Game): + raise InvalidArgument('game must be of type Game or None') + + if idle: + status = 'idle' + + if status == 'idle': + since = int(time.time() * 1000) + + sent_game = dict(game) if game else None + + payload = { + 'op': self.PRESENCE, + 'd': { + 'game': sent_game, + 'afk': afk, + 'since': since, + 'status': status + } + } + + sent = utils.to_json(payload) + log.debug('Sending "{}" to change status'.format(sent)) + yield from self.send(sent) + + status_enum = try_enum(Status, status) + if status_enum is Status.invisible: + status_enum = Status.offline + + for server in self._connection.servers: + me = server.me + if me is None: + continue + + me.game = game + me.status = status_enum + + @asyncio.coroutine + def request_sync(self, guild_ids): + payload = { + 'op': self.GUILD_SYNC, + 'd': list(guild_ids) + } + yield from self.send_as_json(payload) + + @asyncio.coroutine + def voice_state(self, guild_id, channel_id, self_mute=False, self_deaf=False): + payload = { + 'op': self.VOICE_STATE, + 'd': { + 'guild_id': guild_id, + 'channel_id': channel_id, + 'self_mute': self_mute, + 'self_deaf': self_deaf + } + } + + yield from self.send_as_json(payload) + + # we're leaving a voice channel so remove it from the client list + if channel_id is None: + self._connection._remove_voice_client(guild_id) + + @asyncio.coroutine + def close_connection(self, force=False): + if self._keep_alive: + self._keep_alive.stop() + + yield from super().close_connection(force=force) + +class DiscordVoiceWebSocket(websockets.client.WebSocketClientProtocol): + """Implements the websocket protocol for handling voice connections. + + Attributes + ----------- + IDENTIFY + Send only. Starts a new voice session. + SELECT_PROTOCOL + Send only. Tells discord what encryption mode and how to connect for voice. + READY + Receive only. Tells the websocket that the initial connection has completed. + HEARTBEAT + Send only. Keeps your websocket connection alive. + SESSION_DESCRIPTION + Receive only. Gives you the secret key required for voice. + SPEAKING + Send only. Notifies the client if you are currently speaking. 
+ """ + + IDENTIFY = 0 + SELECT_PROTOCOL = 1 + READY = 2 + HEARTBEAT = 3 + SESSION_DESCRIPTION = 4 + SPEAKING = 5 + HELLO = 8 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.max_size = None + self._keep_alive = None + + @asyncio.coroutine + def send_as_json(self, data): + yield from self.send(utils.to_json(data)) + + @classmethod + @asyncio.coroutine + def from_client(cls, client): + """Creates a voice websocket for the :class:`VoiceClient`.""" + gateway = 'wss://' + client.endpoint + try: + ws = yield from asyncio.wait_for( + _ensure_coroutine_connect(gateway, loop=client.loop, klass=cls), + timeout=60, loop=client.loop) + except asyncio.TimeoutError: + log.warn("timed out waiting for voice client connect") + return (yield from cls.from_client(client)) + + ws.gateway = gateway + ws._connection = client + + identify = { + 'op': cls.IDENTIFY, + 'd': { + 'server_id': client.guild_id, + 'user_id': client.user.id, + 'session_id': client.session_id, + 'token': client.token + } + } + + yield from ws.send_as_json(identify) + + try: + # Wait until we have processed READY and keep alive is running + while not ws._keep_alive: + yield from asyncio.wait_for(ws.poll_event(), timeout=60, loop=client.loop) + except asyncio.TimeoutError: + log.warn("timed out waiting for voice client READY") + yield from ws.close(1001) + return (yield from cls.from_client(client)) + + return ws + + @asyncio.coroutine + def select_protocol(self, ip, port): + payload = { + 'op': self.SELECT_PROTOCOL, + 'd': { + 'protocol': 'udp', + 'data': { + 'address': ip, + 'port': port, + 'mode': 'xsalsa20_poly1305' + } + } + } + + yield from self.send_as_json(payload) + log.debug('Selected protocol as {}'.format(payload)) + + @asyncio.coroutine + def speak(self, is_speaking=True): + payload = { + 'op': self.SPEAKING, + 'd': { + 'speaking': is_speaking, + 'delay': 0 + } + } + + yield from self.send_as_json(payload) + log.debug('Voice speaking now set to {}'.format(is_speaking)) + + @asyncio.coroutine + def received_message(self, msg): + log.debug('Voice websocket frame received: {}'.format(msg)) + op = msg.get('op') + data = msg.get('d') + + if op == self.READY: + interval = data['heartbeat_interval'] / 1000.0 + self._keep_alive = VoiceKeepAliveHandler(ws=self, interval=interval) + self._keep_alive.start() + yield from self.initial_connection(data) + elif op == self.SESSION_DESCRIPTION: + yield from self.load_secret_key(data) + + @asyncio.coroutine + def initial_connection(self, data): + state = self._connection + state.ssrc = data.get('ssrc') + state.voice_port = data.get('port') + packet = bytearray(70) + struct.pack_into('>I', packet, 0, state.ssrc) + state.socket.sendto(packet, (state.endpoint_ip, state.voice_port)) + recv = yield from self.loop.sock_recv(state.socket, 70) + log.debug('received packet in initial_connection: {}'.format(recv)) + + # the ip is ascii starting at the 4th byte and ending at the first null + ip_start = 4 + ip_end = recv.index(0, ip_start) + state.ip = recv[ip_start:ip_end].decode('ascii') + + # the port is a little endian unsigned short in the last two bytes + # yes, this is different endianness from everything else + state.port = struct.unpack_from(' r.status >= 200: + log.debug(self.SUCCESS_LOG.format(method=method, url=url, text=data)) + return data + + # we are being rate limited + if r.status == 429: + fmt = 'We are being rate limited. Retrying in {:.2} seconds. 
Handled under the bucket "{}"' + + # sleep a bit + retry_after = data['retry_after'] / 1000.0 + log.info(fmt.format(retry_after, bucket)) + + # check if it's a global rate limit + is_global = data.get('global', False) + if is_global: + log.info('Global rate limit has been hit. Retrying in {:.2} seconds.'.format(retry_after)) + self._global_over.clear() + + yield from asyncio.sleep(retry_after, loop=self.loop) + log.debug('Done sleeping for the rate limit. Retrying...') + + # release the global lock now that the + # global rate limit has passed + if is_global: + self._global_over.set() + log.debug('Global rate limit is now over.') + + continue + + # we've received a 502, unconditional retry + if r.status == 502 and tries <= 5: + yield from asyncio.sleep(1 + tries * 2, loop=self.loop) + continue + + # the usual error cases + if r.status == 403: + raise Forbidden(r, data) + elif r.status == 404: + raise NotFound(r, data) + else: + raise HTTPException(r, data) + finally: + # clean-up just in case + yield from r.release() + + def get(self, *args, **kwargs): + return self.request('GET', *args, **kwargs) + + def put(self, *args, **kwargs): + return self.request('PUT', *args, **kwargs) + + def patch(self, *args, **kwargs): + return self.request('PATCH', *args, **kwargs) + + def delete(self, *args, **kwargs): + return self.request('DELETE', *args, **kwargs) + + def post(self, *args, **kwargs): + return self.request('POST', *args, **kwargs) + + # state management + + @asyncio.coroutine + def close(self): + yield from self.session.close() + + def recreate(self): + self.session = aiohttp.ClientSession(connector=self.connector, loop=self.loop) + + def _token(self, token, *, bot=True): + self.token = token + self.bot_token = bot + + # login management + + @asyncio.coroutine + def email_login(self, email, password): + payload = { + 'email': email, + 'password': password + } + + try: + data = yield from self.request(Route('POST', '/auth/login'), json=payload) + except HTTPException as e: + if e.response.status == 400: + raise LoginFailure('Improper credentials have been passed.') from e + raise + + self._token(data['token'], bot=False) + return data + + @asyncio.coroutine + def static_login(self, token, *, bot): + old_token, old_bot = self.token, self.bot_token + self._token(token, bot=bot) + + try: + data = yield from self.request(Route('GET', '/users/@me')) + except HTTPException as e: + self._token(old_token, bot=old_bot) + if e.response.status == 401: + raise LoginFailure('Improper token has been passed.') from e + raise e + + return data + + def logout(self): + return self.request(Route('POST', '/auth/logout')) + + # Message management + + def start_private_message(self, user_id): + payload = { + 'recipient_id': user_id + } + + return self.request(Route('POST', '/users/@me/channels'), json=payload) + + # TODO: remove guild_id parameters here + + def send_message(self, channel_id, content, *, guild_id=None, tts=False, embed=None): + r = Route('POST', '/channels/{channel_id}/messages', channel_id=channel_id) + payload = {} + + if content: + payload['content'] = content + + if tts: + payload['tts'] = True + + if embed: + payload['embed'] = embed + + return self.request(r, json=payload) + + def send_typing(self, channel_id): + return self.request(Route('POST', '/channels/{channel_id}/typing', channel_id=channel_id)) + + def send_file(self, channel_id, buffer, *, guild_id=None, filename=None, content=None, tts=False, embed=None): + r = Route('POST', '/channels/{channel_id}/messages', 
channel_id=channel_id) + form = aiohttp.FormData() + + payload = {'tts': tts} + if content: + payload['content'] = content + if embed: + payload['embed'] = embed + + form.add_field('payload_json', utils.to_json(payload)) + form.add_field('file', buffer, filename=filename, content_type='application/octet-stream') + + return self.request(r, data=form) + + def delete_message(self, channel_id, message_id, guild_id=None): + r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, + message_id=message_id) + return self.request(r) + + def delete_messages(self, channel_id, message_ids, guild_id=None): + r = Route('POST', '/channels/{channel_id}/messages/bulk_delete', channel_id=channel_id) + payload = { + 'messages': message_ids + } + + return self.request(r, json=payload) + + def edit_message(self, message_id, channel_id, content, *, guild_id=None, embed=None): + r = Route('PATCH', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, + message_id=message_id) + payload = {} + + if content: + payload['content'] = content + + if embed: + payload['embed'] = embed + + return self.request(r, json=payload) + + def add_reaction(self, message_id, channel_id, emoji): + r = Route('PUT', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me', + channel_id=channel_id, message_id=message_id, emoji=emoji) + return self.request(r, header_bypass_delay=0.25) + + def remove_reaction(self, message_id, channel_id, emoji, member_id): + r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/{member_id}', + channel_id=channel_id, message_id=message_id, member_id=member_id, emoji=emoji) + return self.request(r, header_bypass_delay=0.25) + + def get_reaction_users(self, message_id, channel_id, emoji, limit, after=None): + r = Route('GET', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}', + channel_id=channel_id, message_id=message_id, emoji=emoji) + + params = {'limit': limit} + if after: + params['after'] = after + return self.request(r, params=params) + + def clear_reactions(self, message_id, channel_id): + r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions', + channel_id=channel_id, message_id=message_id) + + return self.request(r) + + def get_message(self, channel_id, message_id): + r = Route('GET', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id) + return self.request(r) + + def logs_from(self, channel_id, limit, before=None, after=None, around=None): + params = { + 'limit': limit + } + + if before: + params['before'] = before + if after: + params['after'] = after + if around: + params['around'] = around + + return self.request(Route('GET', '/channels/{channel_id}/messages', channel_id=channel_id), params=params) + + def pin_message(self, channel_id, message_id): + return self.request(Route('PUT', '/channels/{channel_id}/pins/{message_id}', + channel_id=channel_id, message_id=message_id)) + + def unpin_message(self, channel_id, message_id): + return self.request(Route('DELETE', '/channels/{channel_id}/pins/{message_id}', + channel_id=channel_id, message_id=message_id)) + + def pins_from(self, channel_id): + return self.request(Route('GET', '/channels/{channel_id}/pins', channel_id=channel_id)) + + # Member management + + def kick(self, user_id, guild_id): + r = Route('DELETE', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id) + return self.request(r) + + def ban(self, user_id, guild_id, delete_message_days=1): + r 
= Route('PUT', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id) + params = { + 'delete-message-days': delete_message_days + } + return self.request(r, params=params) + + def unban(self, user_id, guild_id): + r = Route('DELETE', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id) + return self.request(r) + + def server_voice_state(self, user_id, guild_id, *, mute=None, deafen=None): + r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id) + payload = {} + if mute is not None: + payload['mute'] = mute + + if deafen is not None: + payload['deaf'] = deafen + + return self.request(r, json=payload) + + def edit_profile(self, password, username, avatar, **fields): + payload = { + 'password': password, + 'username': username, + 'avatar': avatar + } + + if 'email' in fields: + payload['email'] = fields['email'] + + if 'new_password' in fields: + payload['new_password'] = fields['new_password'] + + return self.request(Route('PATCH', '/users/@me'), json=payload) + + def change_my_nickname(self, guild_id, nickname): + payload = { + 'nick': nickname + } + return self.request(Route('PATCH', '/guilds/{guild_id}/members/@me/nick', guild_id=guild_id), json=payload) + + def change_nickname(self, guild_id, user_id, nickname): + r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id) + payload = { + 'nick': nickname + } + return self.request(r, json=payload) + + def edit_member(self, guild_id, user_id, **fields): + r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id) + return self.request(r, json=fields) + + # Channel management + + def edit_channel(self, channel_id, **options): + valid_keys = ('name', 'topic', 'bitrate', 'user_limit', 'position') + payload = { + k: v for k, v in options.items() if k in valid_keys + } + + return self.request(Route('PATCH', '/channels/{channel_id}', channel_id=channel_id), json=payload) + + def move_channel_position(self, guild_id, positions): + r = Route('PATCH', '/guilds/{guild_id}/channels', guild_id=guild_id) + return self.request(r, json=positions) + + def create_channel(self, guild_id, name, channe_type, permission_overwrites=None): + payload = { + 'name': name, + 'type': channe_type + } + + if permission_overwrites is not None: + payload['permission_overwrites'] = permission_overwrites + + return self.request(Route('POST', '/guilds/{guild_id}/channels', guild_id=guild_id), json=payload) + + def delete_channel(self, channel_id): + return self.request(Route('DELETE', '/channels/{channel_id}', channel_id=channel_id)) + + # Guild management + + def leave_server(self, guild_id): + return self.request(Route('DELETE', '/users/@me/guilds/{guild_id}', guild_id=guild_id)) + + def delete_server(self, guild_id): + return self.request(Route('DELETE', '/guilds/{guild_id}', guild_id=guild_id)) + + def create_server(self, name, region, icon): + payload = { + 'name': name, + 'icon': icon, + 'region': region + } + + return self.request(Route('POST', '/guilds'), json=payload) + + def edit_server(self, guild_id, **fields): + valid_keys = ('name', 'region', 'icon', 'afk_timeout', 'owner_id', + 'afk_channel_id', 'splash', 'verification_level') + + payload = { + k: v for k, v in fields.items() if k in valid_keys + } + + return self.request(Route('PATCH', '/guilds/{guild_id}', guild_id=guild_id), json=payload) + + def get_bans(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/bans', guild_id=guild_id)) + + 
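+    # Illustrative sketch, not part of the vendored library code: the helpers in
+    # this class each build a Route and defer to the shared request() coroutine,
+    # which serialises the JSON payload and applies the rate-limit handling shown
+    # earlier (the 429 branch). From a coroutine holding a connected client,
+    # usage composes roughly like:
+    #
+    #     @asyncio.coroutine
+    #     def greet(client, channel_id):
+    #         yield from client.http.send_typing(channel_id)
+    #         yield from client.http.send_message(channel_id, 'hello there')
+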
def prune_members(self, guild_id, days): + params = { + 'days': days + } + return self.request(Route('POST', '/guilds/{guild_id}/prune', guild_id=guild_id), params=params) + + def estimate_pruned_members(self, guild_id, days): + params = { + 'days': days + } + return self.request(Route('GET', '/guilds/{guild_id}/prune', guild_id=guild_id), params=params) + + def create_custom_emoji(self, guild_id, name, image): + payload = { + 'name': name, + 'image': image + } + + r = Route('POST', '/guilds/{guild_id}/emojis', guild_id=guild_id) + return self.request(r, json=payload) + + def delete_custom_emoji(self, guild_id, emoji_id): + return self.request(Route('DELETE', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id)) + + def edit_custom_emoji(self, guild_id, emoji_id, *, name): + payload = { + 'name': name + } + r = Route('PATCH', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id) + return self.request(r, json=payload) + + # Invite management + + def create_invite(self, channel_id, **options): + r = Route('POST', '/channels/{channel_id}/invites', channel_id=channel_id) + payload = { + 'max_age': options.get('max_age', 0), + 'max_uses': options.get('max_uses', 0), + 'temporary': options.get('temporary', False), + 'unique': options.get('unique', True) + } + + return self.request(r, json=payload) + + def get_invite(self, invite_id): + return self.request(Route('GET', '/invite/{invite_id}', invite_id=invite_id)) + + def invites_from(self, guild_id): + return self.request(Route('GET', '/guilds/{guild_id}/invites', guild_id=guild_id)) + + def invites_from_channel(self, channel_id): + return self.request(Route('GET', '/channels/{channel_id}/invites', channel_id=channel_id)) + + def accept_invite(self, invite_id): + return self.request(Route('POST', '/invite/{invite_id}', invite_id=invite_id)) + + def delete_invite(self, invite_id): + return self.request(Route('DELETE', '/invite/{invite_id}', invite_id=invite_id)) + + # Role management + + def edit_role(self, guild_id, role_id, **fields): + r = Route('PATCH', '/guilds/{guild_id}/roles/{role_id}', guild_id=guild_id, role_id=role_id) + valid_keys = ('name', 'permissions', 'color', 'hoist', 'mentionable') + payload = { + k: v for k, v in fields.items() if k in valid_keys + } + return self.request(r, json=payload) + + def delete_role(self, guild_id, role_id): + r = Route('DELETE', '/guilds/{guild_id}/roles/{role_id}', guild_id=guild_id, role_id=role_id) + return self.request(r) + + def replace_roles(self, user_id, guild_id, role_ids): + return self.edit_member(guild_id=guild_id, user_id=user_id, roles=role_ids) + + def create_role(self, guild_id): + r = Route('POST', '/guilds/{guild_id}/roles', guild_id=guild_id) + return self.request(r) + + def move_role_position(self, guild_id, positions): + r = Route('PATCH', '/guilds/{guild_id}/roles', guild_id=guild_id) + return self.request(r, json=positions) + + def add_role(self, guild_id, user_id, role_id): + r = Route('PUT', '/guilds/{guild_id}/members/{user_id}/roles/{role_id}', + guild_id=guild_id, user_id=user_id, role_id=role_id) + return self.request(r) + + def remove_role(self, guild_id, user_id, role_id): + r = Route('DELETE', '/guilds/{guild_id}/members/{user_id}/roles/{role_id}', + guild_id=guild_id, user_id=user_id, role_id=role_id) + return self.request(r) + + def edit_channel_permissions(self, channel_id, target, allow, deny, type): + payload = { + 'id': target, + 'allow': allow, + 'deny': deny, + 'type': type + } + r = Route('PUT', 
'/channels/{channel_id}/permissions/{target}', channel_id=channel_id, target=target) + return self.request(r, json=payload) + + def delete_channel_permissions(self, channel_id, target): + r = Route('DELETE', '/channels/{channel_id}/permissions/{target}', channel_id=channel_id, target=target) + return self.request(r) + + # Voice management + + def move_member(self, user_id, guild_id, channel_id): + return self.edit_member(guild_id=guild_id, user_id=user_id, channel_id=channel_id) + + # Misc + + def application_info(self): + return self.request(Route('GET', '/oauth2/applications/@me')) + + @asyncio.coroutine + def get_gateway(self): + try: + data = yield from self.request(Route('GET', '/gateway')) + except HTTPException as e: + raise GatewayNotFound() from e + return data.get('url') + '?encoding=json&v=6' + + @asyncio.coroutine + def get_bot_gateway(self): + try: + data = yield from self.request(Route('GET', '/gateway/bot')) + except HTTPException as e: + raise GatewayNotFound() from e + else: + return data['shards'], data['url'] + '?encoding=json&v=6' + + def get_user_info(self, user_id): + return self.request(Route('GET', '/users/{user_id}', user_id=user_id)) diff --git a/RBXLegacyDiscordBot/lib/discord/invite.py b/RBXLegacyDiscordBot/lib/discord/invite.py new file mode 100644 index 0000000..b0fedcc --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/invite.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .user import User +from .utils import parse_time +from .mixins import Hashable + +class Invite(Hashable): + """Represents a Discord :class:`Server` or :class:`Channel` invite. + + Depending on the way this object was created, some of the attributes can + have a value of ``None``. + + Supported Operations: + + +-----------+--------------------------------------+ + | Operation | Description | + +===========+======================================+ + | x == y | Checks if two invites are equal. | + +-----------+--------------------------------------+ + | x != y | Checks if two invites are not equal. | + +-----------+--------------------------------------+ + | hash(x) | Return the invite's hash. | + +-----------+--------------------------------------+ + | str(x) | Returns the invite's URL. | + +-----------+--------------------------------------+ + + Attributes + ----------- + max_age : int + How long the before the invite expires in seconds. A value of 0 indicates that it doesn't expire. 
+ code : str + The URL fragment used for the invite. :attr:`xkcd` is also a possible fragment. + server : :class:`Server` + The server the invite is for. + revoked : bool + Indicates if the invite has been revoked. + created_at : `datetime.datetime` + A datetime object denoting the time the invite was created. + temporary : bool + Indicates that the invite grants temporary membership. + If True, members who joined via this invite will be kicked upon disconnect. + uses : int + How many times the invite has been used. + max_uses : int + How many times the invite can be used. + xkcd : str + The URL fragment used for the invite if it is human readable. + inviter : :class:`User` + The user who created the invite. + channel : :class:`Channel` + The channel the invite is for. + """ + + + __slots__ = [ 'max_age', 'code', 'server', 'revoked', 'created_at', 'uses', + 'temporary', 'max_uses', 'xkcd', 'inviter', 'channel' ] + + def __init__(self, **kwargs): + self.max_age = kwargs.get('max_age') + self.code = kwargs.get('code') + self.server = kwargs.get('server') + self.revoked = kwargs.get('revoked') + self.created_at = parse_time(kwargs.get('created_at')) + self.temporary = kwargs.get('temporary') + self.uses = kwargs.get('uses') + self.max_uses = kwargs.get('max_uses') + self.xkcd = kwargs.get('xkcdpass') + + inviter_data = kwargs.get('inviter') + self.inviter = None if inviter_data is None else User(**inviter_data) + self.channel = kwargs.get('channel') + + def __str__(self): + return self.url + + @property + def id(self): + """Returns the proper code portion of the invite.""" + return self.xkcd if self.xkcd else self.code + + @property + def url(self): + """A property that retrieves the invite URL.""" + return 'http://discord.gg/{}'.format(self.id) + diff --git a/RBXLegacyDiscordBot/lib/discord/iterators.py b/RBXLegacyDiscordBot/lib/discord/iterators.py new file mode 100644 index 0000000..2ea5143 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/iterators.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import sys +import asyncio +import aiohttp +from .message import Message +from .object import Object + +PY35 = sys.version_info >= (3, 5) + + +class LogsFromIterator: + """Iterator for recieving logs. + + The messages endpoint has two behaviors we care about here: + If `before` is specified, the messages endpoint returns the `limit` + newest messages before `before`, sorted with newest first. 
For filling over + 100 messages, update the `before` parameter to the oldest message recieved. + Messages will be returned in order by time. + If `after` is specified, it returns the `limit` oldest messages after + `after`, sorted with newest first. For filling over 100 messages, update the + `after` parameter to the newest message recieved. If messages are not + reversed, they will be out of order (99-0, 199-100, so on) + + A note that if both before and after are specified, before is ignored by the + messages endpoint. + + Parameters + ----------- + client : class:`Client` + channel : class:`Channel` + Channel from which to request logs + limit : int + Maximum number of messages to retrieve + before : :class:`Message` or id-like + Message before which all messages must be. + after : :class:`Message` or id-like + Message after which all messages must be. + around : :class:`Message` or id-like + Message around which all messages must be. Limit max 101. Note that if + limit is an even number, this will return at most limit+1 messages. + reverse : bool + If set to true, return messages in oldest->newest order. Recommended + when using with "after" queries with limit over 100, otherwise messages + will be out of order. Defaults to False for backwards compatability. + """ + + def __init__(self, client, channel, limit, + before=None, after=None, around=None, reverse=False): + self.client = client + self.connection = client.connection + self.channel = channel + self.limit = limit + self.before = before + self.after = after + self.around = around + self.reverse = reverse + self._filter = None # message dict -> bool + self.messages = asyncio.Queue() + + if self.around: + if self.limit > 101: + raise ValueError("LogsFrom max limit 101 when specifying around parameter") + elif self.limit == 101: + self.limit = 100 # Thanks discord + elif self.limit == 1: + raise ValueError("Use get_message.") + + self._retrieve_messages = self._retrieve_messages_around_strategy + if self.before and self.after: + self._filter = lambda m: int(self.after.id) < int(m['id']) < int(self.before.id) + elif self.before: + self._filter = lambda m: int(m['id']) < int(self.before.id) + elif self.after: + self._filter = lambda m: int(self.after.id) < int(m['id']) + elif self.before and self.after: + if self.reverse: + self._retrieve_messages = self._retrieve_messages_after_strategy + self._filter = lambda m: int(m['id']) < int(self.before.id) + else: + self._retrieve_messages = self._retrieve_messages_before_strategy + self._filter = lambda m: int(m['id']) > int(self.after.id) + elif self.after: + self._retrieve_messages = self._retrieve_messages_after_strategy + else: + self._retrieve_messages = self._retrieve_messages_before_strategy + + @asyncio.coroutine + def iterate(self): + if self.messages.empty(): + yield from self.fill_messages() + + return self.messages.get_nowait() + + @asyncio.coroutine + def fill_messages(self): + if self.limit > 0: + retrieve = self.limit if self.limit <= 100 else 100 + data = yield from self._retrieve_messages(retrieve) + if self.reverse: + data = reversed(data) + if self._filter: + data = filter(self._filter, data) + for element in data: + yield from self.messages.put( + self.connection._create_message( + channel=self.channel, **element)) + + @asyncio.coroutine + def _retrieve_messages(self, retrieve): + """Retrieve messages and update next parameters.""" + pass + + @asyncio.coroutine + def _retrieve_messages_before_strategy(self, retrieve): + """Retrieve messages using before parameter.""" + 
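+        # Paging note: as described in the class docstring, the endpoint returns
+        # up to `retrieve` messages sorted newest-first, so data[-1] is the oldest
+        # message of the batch; moving `before` onto that id walks further back in
+        # history on the next fill_messages() call.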
data = yield from self.client._logs_from(self.channel, retrieve, before=self.before) + if len(data): + self.limit -= retrieve + self.before = Object(id=data[-1]['id']) + return data + + @asyncio.coroutine + def _retrieve_messages_after_strategy(self, retrieve): + """Retrieve messages using after parameter.""" + data = yield from self.client._logs_from(self.channel, retrieve, after=self.after) + if len(data): + self.limit -= retrieve + self.after = Object(id=data[0]['id']) + return data + + @asyncio.coroutine + def _retrieve_messages_around_strategy(self, retrieve): + """Retrieve messages using around parameter.""" + if self.around: + data = yield from self.client._logs_from(self.channel, retrieve, around=self.around) + self.around = None + return data + return [] + + if PY35: + @asyncio.coroutine + def __aiter__(self): + return self + + @asyncio.coroutine + def __anext__(self): + try: + msg = yield from self.iterate() + return msg + except asyncio.QueueEmpty: + # if we're still empty at this point... + # we didn't get any new messages so stop looping + raise StopAsyncIteration() diff --git a/RBXLegacyDiscordBot/lib/discord/member.py b/RBXLegacyDiscordBot/lib/discord/member.py new file mode 100644 index 0000000..50ad184 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/member.py @@ -0,0 +1,229 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .user import User +from .game import Game +from .permissions import Permissions +from . import utils +from .enums import Status, ChannelType +from .colour import Colour +import copy + +class VoiceState: + """Represents a Discord user's voice state. + + Attributes + ------------ + deaf: bool + Indicates if the user is currently deafened by the server. + mute: bool + Indicates if the user is currently muted by the server. + self_mute: bool + Indicates if the user is currently muted by their own accord. + self_deaf: bool + Indicates if the user is currently deafened by their own accord. + is_afk: bool + Indicates if the user is currently in the AFK channel in the server. + voice_channel: Optional[Union[:class:`Channel`, :class:`PrivateChannel`]] + The voice channel that the user is currently connected to. None if the user + is not currently in a voice channel. 
+ """ + + __slots__ = [ 'session_id', 'deaf', 'mute', 'self_mute', + 'self_deaf', 'is_afk', 'voice_channel' ] + + def __init__(self, **kwargs): + self.session_id = kwargs.get('session_id') + self._update_voice_state(**kwargs) + + def _update_voice_state(self, **kwargs): + self.self_mute = kwargs.get('self_mute', False) + self.self_deaf = kwargs.get('self_deaf', False) + self.is_afk = kwargs.get('suppress', False) + self.mute = kwargs.get('mute', False) + self.deaf = kwargs.get('deaf', False) + self.voice_channel = kwargs.get('voice_channel') + +def flatten_voice_states(cls): + for attr in VoiceState.__slots__: + def getter(self, x=attr): + return getattr(self.voice, x) + setattr(cls, attr, property(getter)) + return cls + +@flatten_voice_states +class Member(User): + """Represents a Discord member to a :class:`Server`. + + This is a subclass of :class:`User` that extends more functionality + that server members have such as roles and permissions. + + Attributes + ---------- + voice: :class:`VoiceState` + The member's voice state. Properties are defined to mirror access of the attributes. + e.g. ``Member.is_afk`` is equivalent to `Member.voice.is_afk``. + roles + A list of :class:`Role` that the member belongs to. Note that the first element of this + list is always the default '@everyone' role. + joined_at : `datetime.datetime` + A datetime object that specifies the date and time in UTC that the member joined the server for + the first time. + status : :class:`Status` + The member's status. There is a chance that the status will be a ``str`` + if it is a value that is not recognised by the enumerator. + game : :class:`Game` + The game that the user is currently playing. Could be None if no game is being played. + server : :class:`Server` + The server that the member belongs to. + nick : Optional[str] + The server specific nickname of the user. + """ + + __slots__ = [ 'roles', 'joined_at', 'status', 'game', 'server', 'nick', 'voice' ] + + def __init__(self, **kwargs): + super().__init__(**kwargs.get('user')) + self.voice = VoiceState(**kwargs) + self.joined_at = utils.parse_time(kwargs.get('joined_at')) + self.roles = kwargs.get('roles', []) + self.status = Status.offline + game = kwargs.get('game', {}) + self.game = Game(**game) if game else None + self.server = kwargs.get('server', None) + self.nick = kwargs.get('nick', None) + + def _update_voice_state(self, **kwargs): + self.voice.self_mute = kwargs.get('self_mute', False) + self.voice.self_deaf = kwargs.get('self_deaf', False) + self.voice.is_afk = kwargs.get('suppress', False) + self.voice.mute = kwargs.get('mute', False) + self.voice.deaf = kwargs.get('deaf', False) + old_channel = getattr(self, 'voice_channel', None) + vc = kwargs.get('voice_channel') + + if old_channel is None and vc is not None: + # we joined a channel + vc.voice_members.append(self) + elif old_channel is not None: + try: + # we either left a channel or we switched channels + old_channel.voice_members.remove(self) + except ValueError: + pass + finally: + # we switched channels + if vc is not None: + vc.voice_members.append(self) + + self.voice.voice_channel = vc + + def _copy(self): + ret = copy.copy(self) + ret.voice = copy.copy(self.voice) + return ret + + @property + def colour(self): + """A property that returns a :class:`Colour` denoting the rendered colour + for the member. If the default colour is the one rendered then an instance + of :meth:`Colour.default` is returned. + + There is an alias for this under ``color``. 
+ """ + + default_colour = Colour.default() + # highest order of the colour is the one that gets rendered. + # if the highest is the default colour then the next one with a colour + # is chosen instead + if self.roles: + roles = sorted(self.roles, key=lambda r: r.position, reverse=True) + for role in roles: + if role.colour == default_colour: + continue + else: + return role.colour + + return default_colour + + color = colour + + @property + def mention(self): + if self.nick: + return '<@!{}>'.format(self.id) + return '<@{}>'.format(self.id) + + def mentioned_in(self, message): + mentioned = super().mentioned_in(message) + if mentioned: + return True + + for role in message.role_mentions: + has_role = utils.get(self.roles, id=role.id) is not None + if has_role: + return True + + return False + + @property + def top_role(self): + """Returns the member's highest role. + + This is useful for figuring where a member stands in the role + hierarchy chain. + """ + + if self.roles: + roles = sorted(self.roles, reverse=True) + return roles[0] + return None + + @property + def server_permissions(self): + """Returns the member's server permissions. + + This only takes into consideration the server permissions + and not most of the implied permissions or any of the + channel permission overwrites. For 100% accurate permission + calculation, please use either :meth:`permissions_in` or + :meth:`Channel.permissions_for`. + + This does take into consideration server ownership and the + administrator implication. + """ + + if self.server.owner == self: + return Permissions.all() + + base = Permissions.none() + for r in self.roles: + base.value |= r.permissions.value + + if base.administrator: + return Permissions.all() + + return base diff --git a/RBXLegacyDiscordBot/lib/discord/message.py b/RBXLegacyDiscordBot/lib/discord/message.py new file mode 100644 index 0000000..e6e2fdd --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/message.py @@ -0,0 +1,330 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from . import utils +from .user import User +from .reaction import Reaction +from .object import Object +from .calls import CallMessage +import re +from .enums import MessageType, try_enum + +class Message: + """Represents a message from Discord. + + There should be no need to create one of these manually. 
+ + Attributes + ----------- + edited_timestamp : Optional[datetime.datetime] + A naive UTC datetime object containing the edited time of the message. + timestamp : datetime.datetime + A naive UTC datetime object containing the time the message was created. + tts : bool + Specifies if the message was done with text-to-speech. + type: :class:`MessageType` + The type of message. In most cases this should not be checked, but it is helpful + in cases where it might be a system message for :attr:`system_content`. + author + A :class:`Member` that sent the message. If :attr:`channel` is a + private channel, then it is a :class:`User` instead. + content : str + The actual contents of the message. + nonce + The value used by the discord server and the client to verify that the message is successfully sent. + This is typically non-important. + embeds : list + A list of embedded objects. The elements are objects that meet oEmbed's specification_. + + .. _specification: http://oembed.com/ + channel + The :class:`Channel` that the message was sent from. + Could be a :class:`PrivateChannel` if it's a private message. + In :issue:`very rare cases <21>` this could be a :class:`Object` instead. + + For the sake of convenience, this :class:`Object` instance has an attribute ``is_private`` set to ``True``. + server : Optional[:class:`Server`] + The server that the message belongs to. If not applicable (i.e. a PM) then it's None instead. + call: Optional[:class:`CallMessage`] + The call that the message refers to. This is only applicable to messages of type + :attr:`MessageType.call`. + mention_everyone : bool + Specifies if the message mentions everyone. + + .. note:: + + This does not check if the ``@everyone`` text is in the message itself. + Rather this boolean indicates if the ``@everyone`` text is in the message + **and** it did end up mentioning everyone. + + mentions: list + A list of :class:`Member` that were mentioned. If the message is in a private message + then the list will be of :class:`User` instead. For messages that are not of type + :attr:`MessageType.default`\, this array can be used to aid in system messages. + For more information, see :attr:`system_content`. + + .. warning:: + + The order of the mentions list is not in any particular order so you should + not rely on it. This is a discord limitation, not one with the library. + + channel_mentions : list + A list of :class:`Channel` that were mentioned. If the message is in a private message + then the list is always empty. + role_mentions : list + A list of :class:`Role` that were mentioned. If the message is in a private message + then the list is always empty. + id : str + The message ID. + attachments : list + A list of attachments given to a message. + pinned: bool + Specifies if the message is currently pinned. + reactions : List[:class:`Reaction`] + Reactions to a message. Reactions can be either custom emoji or standard unicode emoji. 
+ """ + + __slots__ = [ 'edited_timestamp', 'timestamp', 'tts', 'content', 'channel', + 'mention_everyone', 'embeds', 'id', 'mentions', 'author', + 'channel_mentions', 'server', '_raw_mentions', 'attachments', + '_clean_content', '_raw_channel_mentions', 'nonce', 'pinned', + 'role_mentions', '_raw_role_mentions', 'type', 'call', + '_system_content', 'reactions' ] + + def __init__(self, **kwargs): + self.reactions = kwargs.pop('reactions') + for reaction in self.reactions: + reaction.message = self + self._update(**kwargs) + + def _update(self, **data): + # at the moment, the timestamps seem to be naive so they have no time zone and operate on UTC time. + # we can use this to our advantage to use strptime instead of a complicated parsing routine. + # example timestamp: 2015-08-21T12:03:45.782000+00:00 + # sometimes the .%f modifier is missing + self.edited_timestamp = utils.parse_time(data.get('edited_timestamp')) + self.timestamp = utils.parse_time(data.get('timestamp')) + self.tts = data.get('tts', False) + self.pinned = data.get('pinned', False) + self.content = data.get('content') + self.mention_everyone = data.get('mention_everyone') + self.embeds = data.get('embeds') + self.id = data.get('id') + self.channel = data.get('channel') + self.author = User(**data.get('author', {})) + self.nonce = data.get('nonce') + self.attachments = data.get('attachments') + self.type = try_enum(MessageType, data.get('type')) + self._handle_upgrades(data.get('channel_id')) + self._handle_mentions(data.get('mentions', []), data.get('mention_roles', [])) + self._handle_call(data.get('call')) + + # clear the cached properties + cached = filter(lambda attr: attr[0] == '_', self.__slots__) + for attr in cached: + try: + delattr(self, attr) + except AttributeError: + pass + + def _handle_mentions(self, mentions, role_mentions): + self.mentions = [] + self.channel_mentions = [] + self.role_mentions = [] + if getattr(self.channel, 'is_private', True): + self.mentions = [User(**m) for m in mentions] + return + + if self.server is not None: + for mention in mentions: + id_search = mention.get('id') + member = self.server.get_member(id_search) + if member is not None: + self.mentions.append(member) + + it = filter(None, map(lambda m: self.server.get_channel(m), self.raw_channel_mentions)) + self.channel_mentions = utils._unique(it) + + for role_id in role_mentions: + role = utils.get(self.server.roles, id=role_id) + if role is not None: + self.role_mentions.append(role) + + def _handle_call(self, call): + if call is None or self.type is not MessageType.call: + self.call = None + return + + # we get the participant source from the mentions array or + # the author + + participants = [] + for uid in call.get('participants', []): + if uid == self.author.id: + participants.append(self.author) + else: + user = utils.find(lambda u: u.id == uid, self.mentions) + if user is not None: + participants.append(user) + + call['participants'] = participants + self.call = CallMessage(message=self, **call) + + @utils.cached_slot_property('_raw_mentions') + def raw_mentions(self): + """A property that returns an array of user IDs matched with + the syntax of <@user_id> in the message content. + + This allows you receive the user IDs of mentioned users + even in a private message context. 
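+
+        For instance (the ID is purely illustrative)::
+
+            # message.content == 'hi <@80088516616269824>!'
+            message.raw_mentions  # ['80088516616269824']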
+ """ + return re.findall(r'<@!?([0-9]+)>', self.content) + + @utils.cached_slot_property('_raw_channel_mentions') + def raw_channel_mentions(self): + """A property that returns an array of channel IDs matched with + the syntax of <#channel_id> in the message content. + """ + return re.findall(r'<#([0-9]+)>', self.content) + + @utils.cached_slot_property('_raw_role_mentions') + def raw_role_mentions(self): + """A property that returns an array of role IDs matched with + the syntax of <@&role_id> in the message content. + """ + return re.findall(r'<@&([0-9]+)>', self.content) + + @utils.cached_slot_property('_clean_content') + def clean_content(self): + """A property that returns the content in a "cleaned up" + manner. This basically means that mentions are transformed + into the way the client shows it. e.g. ``<#id>`` will transform + into ``#name``. + + This will also transform @everyone and @here mentions into + non-mentions. + """ + + transformations = { + re.escape('<#{0.id}>'.format(channel)): '#' + channel.name + for channel in self.channel_mentions + } + + mention_transforms = { + re.escape('<@{0.id}>'.format(member)): '@' + member.display_name + for member in self.mentions + } + + # add the <@!user_id> cases as well.. + second_mention_transforms = { + re.escape('<@!{0.id}>'.format(member)): '@' + member.display_name + for member in self.mentions + } + + transformations.update(mention_transforms) + transformations.update(second_mention_transforms) + + if self.server is not None: + role_transforms = { + re.escape('<@&{0.id}>'.format(role)): '@' + role.name + for role in self.role_mentions + } + transformations.update(role_transforms) + + def repl(obj): + return transformations.get(re.escape(obj.group(0)), '') + + pattern = re.compile('|'.join(transformations.keys())) + result = pattern.sub(repl, self.content) + + transformations = { + '@everyone': '@\u200beveryone', + '@here': '@\u200bhere' + } + + def repl2(obj): + return transformations.get(obj.group(0), '') + + pattern = re.compile('|'.join(transformations.keys())) + return pattern.sub(repl2, result) + + def _handle_upgrades(self, channel_id): + self.server = None + if isinstance(self.channel, Object): + return + + if self.channel is None: + if channel_id is not None: + self.channel = Object(id=channel_id) + self.channel.is_private = True + return + + if not self.channel.is_private: + self.server = self.channel.server + found = self.server.get_member(self.author.id) + if found is not None: + self.author = found + + @utils.cached_slot_property('_system_content') + def system_content(self): + """A property that returns the content that is rendered + regardless of the :attr:`Message.type`. + + In the case of :attr:`MessageType.default`\, this just returns the + regular :attr:`Message.content`. Otherwise this returns an English + message denoting the contents of the system message. 
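+
+        For instance, a :attr:`MessageType.pins_add` message is rendered as
+        ``'<author name> pinned a message to this channel.'``.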
+ """ + + if self.type is MessageType.default: + return self.content + + if self.type is MessageType.pins_add: + return '{0.name} pinned a message to this channel.'.format(self.author) + + if self.type is MessageType.recipient_add: + return '{0.name} added {1.name} to the group.'.format(self.author, self.mentions[0]) + + if self.type is MessageType.recipient_remove: + return '{0.name} removed {1.name} from the group.'.format(self.author, self.mentions[0]) + + if self.type is MessageType.channel_name_change: + return '{0.author.name} changed the channel name: {0.content}'.format(self) + + if self.type is MessageType.channel_icon_change: + return '{0.author.name} changed the channel icon.'.format(self) + + if self.type is MessageType.call: + # we're at the call message type now, which is a bit more complicated. + # we can make the assumption that Message.channel is a PrivateChannel + # with the type ChannelType.group or ChannelType.private + call_ended = self.call.ended_timestamp is not None + + if self.channel.me in self.call.participants: + return '{0.author.name} started a call.'.format(self) + elif call_ended: + return 'You missed a call from {0.author.name}'.format(self) + else: + return '{0.author.name} started a call \N{EM DASH} Join the call.'.format(self) diff --git a/RBXLegacyDiscordBot/lib/discord/mixins.py b/RBXLegacyDiscordBot/lib/discord/mixins.py new file mode 100644 index 0000000..da40d44 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/mixins.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +class EqualityComparable: + __slots__ = [] + + def __eq__(self, other): + return isinstance(other, self.__class__) and other.id == self.id + + def __ne__(self, other): + if isinstance(other, self.__class__): + return other.id != self.id + return True + +class Hashable(EqualityComparable): + __slots__ = [] + + def __hash__(self): + return hash(self.id) diff --git a/RBXLegacyDiscordBot/lib/discord/object.py b/RBXLegacyDiscordBot/lib/discord/object.py new file mode 100644 index 0000000..ad201d0 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/object.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from . import utils + +class Object: + """Represents a generic Discord object. + + The purpose of this class is to allow you to create 'miniature' + versions of data classes if you want to pass in just an ID. Most functions + that take in a specific data class with an ID can also take in this class + as a substitute instead. Note that even though this is the case, not all + objects (if any) actually inherit from this class. + + There are also some cases where some websocket events are received + in :issue:`strange order <21>` and when such events happened you would + receive this class rather than the actual data class. These cases are + extremely rare. + + Attributes + ----------- + id : str + The ID of the object. + """ + + def __init__(self, id): + self.id = id + + @property + def created_at(self): + """Returns the snowflake's creation time in UTC.""" + return utils.snowflake_time(self.id) diff --git a/RBXLegacyDiscordBot/lib/discord/opus.py b/RBXLegacyDiscordBot/lib/discord/opus.py new file mode 100644 index 0000000..5869599 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/opus.py @@ -0,0 +1,277 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +import ctypes +import ctypes.util +import array +from .errors import DiscordException +import logging +import sys +import os.path + +log = logging.getLogger(__name__) +c_int_ptr = ctypes.POINTER(ctypes.c_int) +c_int16_ptr = ctypes.POINTER(ctypes.c_int16) +c_float_ptr = ctypes.POINTER(ctypes.c_float) + +class EncoderStruct(ctypes.Structure): + pass + +EncoderStructPtr = ctypes.POINTER(EncoderStruct) + +# A list of exported functions. +# The first argument is obviously the name. +# The second one are the types of arguments it takes. +# The third is the result type. +exported_functions = [ + ('opus_strerror', [ctypes.c_int], ctypes.c_char_p), + ('opus_encoder_get_size', [ctypes.c_int], ctypes.c_int), + ('opus_encoder_create', [ctypes.c_int, ctypes.c_int, ctypes.c_int, c_int_ptr], EncoderStructPtr), + ('opus_encode', [EncoderStructPtr, c_int16_ptr, ctypes.c_int, ctypes.c_char_p, ctypes.c_int32], ctypes.c_int32), + ('opus_encoder_ctl', None, ctypes.c_int32), + ('opus_encoder_destroy', [EncoderStructPtr], None) +] + +def libopus_loader(name): + # create the library... + lib = ctypes.cdll.LoadLibrary(name) + + # register the functions... + for item in exported_functions: + try: + func = getattr(lib, item[0]) + except Exception as e: + raise e + + try: + if item[1]: + func.argtypes = item[1] + + func.restype = item[2] + except KeyError: + pass + + return lib + +try: + if sys.platform == 'win32': + _basedir = os.path.dirname(os.path.abspath(__file__)) + _bitness = 'x64' if sys.maxsize > 2**32 else 'x86' + _filename = os.path.join(_basedir, 'bin', 'libopus-0.{}.dll'.format(_bitness)) + _lib = libopus_loader(_filename) + else: + _lib = libopus_loader(ctypes.util.find_library('opus')) +except Exception as e: + _lib = None + +def load_opus(name): + """Loads the libopus shared library for use with voice. + + If this function is not called then the library uses the function + `ctypes.util.find_library`__ and then loads that one + if available. + + .. _find library: https://docs.python.org/3.5/library/ctypes.html#finding-shared-libraries + __ `find library`_ + + Not loading a library leads to voice not working. + + This function propagates the exceptions thrown. + + Warning + -------- + The bitness of the library must match the bitness of your python + interpreter. If the library is 64-bit then your python interpreter + must be 64-bit as well. Usually if there's a mismatch in bitness then + the load will throw an exception. + + Note + ---- + On Windows, the .dll extension is not necessary. However, on Linux + the full extension is required to load the library, e.g. ``libopus.so.1``. + On Linux however, `find library`_ will usually find the library automatically + without you having to call this. + + Parameters + ---------- + name: str + The filename of the shared library. + """ + global _lib + _lib = libopus_loader(name) + +def is_loaded(): + """Function to check if opus lib is successfully loaded either + via the ``ctypes.util.find_library`` call of :func:`load_opus`. + + This must return ``True`` for voice to work. 
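+
+    A minimal sketch of the intended call order (the DLL name below is only
+    illustrative; pass whatever filename matches your platform and bitness)::
+
+        import discord.opus
+
+        if not discord.opus.is_loaded():
+            discord.opus.load_opus('libopus-0.x64.dll')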
+ + Returns + ------- + bool + Indicates if the opus library has been loaded. + """ + global _lib + return _lib is not None + +class OpusError(DiscordException): + """An exception that is thrown for libopus related errors. + + Attributes + ---------- + code : int + The error code returned. + """ + + def __init__(self, code): + self.code = code + msg = _lib.opus_strerror(self.code).decode('utf-8') + log.info('"{}" has happened'.format(msg)) + super().__init__(msg) + +class OpusNotLoaded(DiscordException): + """An exception that is thrown for when libopus is not loaded.""" + pass + + +# Some constants... +OK = 0 +APPLICATION_AUDIO = 2049 +APPLICATION_VOIP = 2048 +APPLICATION_LOWDELAY = 2051 +CTL_SET_BITRATE = 4002 +CTL_SET_BANDWIDTH = 4008 +CTL_SET_FEC = 4012 +CTL_SET_PLP = 4014 +CTL_SET_SIGNAL = 4024 + +band_ctl = { + 'narrow': 1101, + 'medium': 1102, + 'wide': 1103, + 'superwide': 1104, + 'full': 1105, +} + +signal_ctl = { + 'auto': -1000, + 'voice': 3001, + 'music': 3002, +} + +class Encoder: + def __init__(self, sampling, channels, application=APPLICATION_AUDIO): + self.sampling_rate = sampling + self.channels = channels + self.application = application + + self.frame_length = 20 + self.sample_size = 2 * self.channels # (bit_rate / 8) but bit_rate == 16 + self.samples_per_frame = int(self.sampling_rate / 1000 * self.frame_length) + self.frame_size = self.samples_per_frame * self.sample_size + + if not is_loaded(): + raise OpusNotLoaded() + + self._state = self._create_state() + self.set_bitrate(128) + self.set_fec(True) + self.set_expected_packet_loss_percent(0.15) + self.set_bandwidth('full') + self.set_signal_type('auto') + + def __del__(self): + if hasattr(self, '_state'): + _lib.opus_encoder_destroy(self._state) + self._state = None + + def _create_state(self): + ret = ctypes.c_int() + result = _lib.opus_encoder_create(self.sampling_rate, self.channels, self.application, ctypes.byref(ret)) + + if ret.value != 0: + log.info('error has happened in state creation') + raise OpusError(ret.value) + + return result + + def set_bitrate(self, kbps): + kbps = min(128, max(16, int(kbps))) + + ret = _lib.opus_encoder_ctl(self._state, CTL_SET_BITRATE, kbps * 1024) + if ret < 0: + log.info('error has happened in set_bitrate') + raise OpusError(ret) + + return kbps + + def set_bandwidth(self, req): + if req not in band_ctl: + raise KeyError('%r is not a valid bandwidth setting. Try one of: %s' % (req, ','.join(band_ctl))) + + k = band_ctl[req] + ret = _lib.opus_encoder_ctl(self._state, CTL_SET_BANDWIDTH, k) + + if ret < 0: + log.info('error has happened in set_bandwidth') + raise OpusError(ret) + + def set_signal_type(self, req): + if req not in signal_ctl: + raise KeyError('%r is not a valid signal setting. 
Try one of: %s' % (req, ','.join(signal_ctl))) + + k = signal_ctl[req] + ret = _lib.opus_encoder_ctl(self._state, CTL_SET_SIGNAL, k) + + if ret < 0: + log.info('error has happened in set_signal_type') + raise OpusError(ret) + + def set_fec(self, enabled=True): + ret = _lib.opus_encoder_ctl(self._state, CTL_SET_FEC, 1 if enabled else 0) + + if ret < 0: + log.info('error has happened in set_fec') + raise OpusError(ret) + + def set_expected_packet_loss_percent(self, percentage): + ret = _lib.opus_encoder_ctl(self._state, CTL_SET_PLP, min(100, max(0, int(percentage * 100)))) + + if ret < 0: + log.info('error has happened in set_expected_packet_loss_percent') + raise OpusError(ret) + + def encode(self, pcm, frame_size): + max_data_bytes = len(pcm) + pcm = ctypes.cast(pcm, c_int16_ptr) + data = (ctypes.c_char * max_data_bytes)() + + ret = _lib.opus_encode(self._state, pcm, frame_size, data, max_data_bytes) + if ret < 0: + log.info('error has happened in encode') + raise OpusError(ret) + + return array.array('b', data[:ret]).tobytes() diff --git a/RBXLegacyDiscordBot/lib/discord/permissions.py b/RBXLegacyDiscordBot/lib/discord/permissions.py new file mode 100644 index 0000000..561cb13 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/permissions.py @@ -0,0 +1,597 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +class Permissions: + """Wraps up the Discord permission value. + + Supported operations: + + +-----------+------------------------------------------+ + | Operation | Description | + +===========+==========================================+ + | x == y | Checks if two permissions are equal. | + +-----------+------------------------------------------+ + | x != y | Checks if two permissions are not equal. | + +-----------+------------------------------------------+ + | x <= y | Checks if a permission is a subset | + | | of another permission. | + +-----------+------------------------------------------+ + | x >= y | Checks if a permission is a superset | + | | of another permission. | + +-----------+------------------------------------------+ + | x < y | Checks if a permission is a strict | + | | subset of another permission. | + +-----------+------------------------------------------+ + | x > y | Checks if a permission is a strict | + | | superset of another permission. | + +-----------+------------------------------------------+ + | hash(x) | Return the permission's hash. 
| + +-----------+------------------------------------------+ + | iter(x) | Returns an iterator of (perm, value) | + | | pairs. This allows this class to be used | + | | as an iterable in e.g. set/list/dict | + | | constructions. | + +-----------+------------------------------------------+ + + The properties provided are two way. You can set and retrieve individual bits using the properties as if they + were regular bools. This allows you to edit permissions. + + Attributes + ----------- + value + The raw value. This value is a bit array field of a 32-bit integer + representing the currently available permissions. You should query + permissions via the properties rather than using this raw value. + """ + + __slots__ = [ 'value' ] + def __init__(self, permissions=0, **kwargs): + self.value = permissions + + def __eq__(self, other): + return isinstance(other, Permissions) and self.value == other.value + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self.value) + + def _perm_iterator(self): + for attr in dir(self): + # check if it's a property, because if so it's a permission + is_property = isinstance(getattr(self.__class__, attr), property) + if is_property: + yield (attr, getattr(self, attr)) + + def __iter__(self): + return self._perm_iterator() + + def is_subset(self, other): + """Returns True if self has the same or fewer permissions as other.""" + if isinstance(other, Permissions): + return (self.value & other.value) == self.value + else: + raise TypeError("cannot compare {} with {}".format(self.__class__.__name__, other.__class__name)) + + def is_superset(self, other): + """Returns True if self has the same or more permissions as other.""" + if isinstance(other, Permissions): + return (self.value | other.value) == self.value + else: + raise TypeError("cannot compare {} with {}".format(self.__class__.__name__, other.__class__name)) + + def is_strict_subset(self, other): + """Returns True if the permissions on other are a strict subset of those on self.""" + return self.is_subset(other) and self != other + + def is_strict_superset(self, other): + """Returns True if the permissions on other are a strict superset of those on self.""" + return self.is_superset(other) and self != other + + __le__ = is_subset + __ge__ = is_superset + __lt__ = is_strict_subset + __gt__ = is_strict_superset + + @classmethod + def none(cls): + """A factory method that creates a :class:`Permissions` with all + permissions set to False.""" + return cls(0) + + @classmethod + def all(cls): + """A factory method that creates a :class:`Permissions` with all + permissions set to True.""" + return cls(0b01111111111101111111110011111111) + + @classmethod + def all_channel(cls): + """A :class:`Permissions` with all channel-specific permissions set to + True and the server-specific ones set to False. 
The server-specific + permissions are currently: + + - manager_server + - kick_members + - ban_members + - administrator + - change_nicknames + - manage_nicknames + """ + return cls(0b00110011111101111111110001010001) + + @classmethod + def general(cls): + """A factory method that creates a :class:`Permissions` with all + "General" permissions from the official Discord UI set to True.""" + return cls(0b01111100000000000000000010111111) + + @classmethod + def text(cls): + """A factory method that creates a :class:`Permissions` with all + "Text" permissions from the official Discord UI set to True.""" + return cls(0b00000000000001111111110001000000) + + @classmethod + def voice(cls): + """A factory method that creates a :class:`Permissions` with all + "Voice" permissions from the official Discord UI set to True.""" + return cls(0b00000011111100000000000000000000) + + def update(self, **kwargs): + """Bulk updates this permission object. + + Allows you to set multiple attributes by using keyword + arguments. The names must be equivalent to the properties + listed. Extraneous key/value pairs will be silently ignored. + + Parameters + ------------ + \*\*kwargs + A list of key/value pairs to bulk update permissions with. + """ + for key, value in kwargs.items(): + try: + is_property = isinstance(getattr(self.__class__, key), property) + except AttributeError: + continue + + if is_property: + setattr(self, key, value) + + def _bit(self, index): + return bool((self.value >> index) & 1) + + def _set(self, index, value): + if value == True: + self.value |= (1 << index) + elif value == False: + self.value &= ~(1 << index) + else: + raise TypeError('Value to set for Permissions must be a bool.') + + def handle_overwrite(self, allow, deny): + # Basically this is what's happening here. + # We have an original bit array, e.g. 1010 + # Then we have another bit array that is 'denied', e.g. 1111 + # And then we have the last one which is 'allowed', e.g. 0101 + # We want original OP denied to end up resulting in + # whatever is in denied to be set to 0. + # So 1010 OP 1111 -> 0000 + # Then we take this value and look at the allowed values. + # And whatever is allowed is set to 1. + # So 0000 OP2 0101 -> 0101 + # The OP is base & ~denied. + # The OP2 is base | allowed. + self.value = (self.value & ~deny) | allow + + @property + def create_instant_invite(self): + """Returns True if the user can create instant invites.""" + return self._bit(0) + + @create_instant_invite.setter + def create_instant_invite(self, value): + self._set(0, value) + + @property + def kick_members(self): + """Returns True if the user can kick users from the server.""" + return self._bit(1) + + @kick_members.setter + def kick_members(self, value): + self._set(1, value) + + @property + def ban_members(self): + """Returns True if a user can ban users from the server.""" + return self._bit(2) + + @ban_members.setter + def ban_members(self, value): + self._set(2, value) + + @property + def administrator(self): + """Returns True if a user is an administrator. This role overrides all other permissions. + + This also bypasses all channel-specific overrides. + """ + return self._bit(3) + + @administrator.setter + def administrator(self, value): + self._set(3, value) + + @property + def manage_channels(self): + """Returns True if a user can edit, delete, or create channels in the server. 
+ + This also corresponds to the "manage channel" channel-specific override.""" + return self._bit(4) + + @manage_channels.setter + def manage_channels(self, value): + self._set(4, value) + + @property + def manage_server(self): + """Returns True if a user can edit server properties.""" + return self._bit(5) + + @manage_server.setter + def manage_server(self, value): + self._set(5, value) + + @property + def add_reactions(self): + """Returns True if a user can add reactions to messages.""" + return self._bit(6) + + @add_reactions.setter + def add_reactions(self, value): + self._set(6, value) + + @property + def view_audit_logs(self): + """Returns True if a user can view the server's audit log.""" + return self._bit(7) + + @view_audit_logs.setter + def view_audit_logs(self, value): + self._set(7, value) + + # 2 unused + + @property + def read_messages(self): + """Returns True if a user can read messages from all or specific text channels.""" + return self._bit(10) + + @read_messages.setter + def read_messages(self, value): + self._set(10, value) + + @property + def send_messages(self): + """Returns True if a user can send messages from all or specific text channels.""" + return self._bit(11) + + @send_messages.setter + def send_messages(self, value): + self._set(11, value) + + @property + def send_tts_messages(self): + """Returns True if a user can send TTS messages from all or specific text channels.""" + return self._bit(12) + + @send_tts_messages.setter + def send_tts_messages(self, value): + self._set(12, value) + + @property + def manage_messages(self): + """Returns True if a user can delete messages from a text channel. Note that there are currently no ways to edit other people's messages.""" + return self._bit(13) + + @manage_messages.setter + def manage_messages(self, value): + self._set(13, value) + + @property + def embed_links(self): + """Returns True if a user's messages will automatically be embedded by Discord.""" + return self._bit(14) + + @embed_links.setter + def embed_links(self, value): + self._set(14, value) + + @property + def attach_files(self): + """Returns True if a user can send files in their messages.""" + return self._bit(15) + + @attach_files.setter + def attach_files(self, value): + self._set(15, value) + + @property + def read_message_history(self): + """Returns True if a user can read a text channel's previous messages.""" + return self._bit(16) + + @read_message_history.setter + def read_message_history(self, value): + self._set(16, value) + + @property + def mention_everyone(self): + """Returns True if a user's @everyone will mention everyone in the text channel.""" + return self._bit(17) + + @mention_everyone.setter + def mention_everyone(self, value): + self._set(17, value) + + @property + def external_emojis(self): + """Returns True if a user can use emojis from other servers.""" + return self._bit(18) + + @external_emojis.setter + def external_emojis(self, value): + self._set(18, value) + + # 1 unused + + @property + def connect(self): + """Returns True if a user can connect to a voice channel.""" + return self._bit(20) + + @connect.setter + def connect(self, value): + self._set(20, value) + + @property + def speak(self): + """Returns True if a user can speak in a voice channel.""" + return self._bit(21) + + @speak.setter + def speak(self, value): + self._set(21, value) + + @property + def mute_members(self): + """Returns True if a user can mute other users.""" + return self._bit(22) + + @mute_members.setter + def mute_members(self, value): + 
self._set(22, value) + + @property + def deafen_members(self): + """Returns True if a user can deafen other users.""" + return self._bit(23) + + @deafen_members.setter + def deafen_members(self, value): + self._set(23, value) + + @property + def move_members(self): + """Returns True if a user can move users between other voice channels.""" + return self._bit(24) + + @move_members.setter + def move_members(self, value): + self._set(24, value) + + @property + def use_voice_activation(self): + """Returns True if a user can use voice activation in voice channels.""" + return self._bit(25) + + @use_voice_activation.setter + def use_voice_activation(self, value): + self._set(25, value) + + @property + def change_nickname(self): + """Returns True if a user can change their nickname in the server.""" + return self._bit(26) + + @change_nickname.setter + def change_nickname(self, value): + self._set(26, value) + + @property + def manage_nicknames(self): + """Returns True if a user can change other user's nickname in the server.""" + return self._bit(27) + + @manage_nicknames.setter + def manage_nicknames(self, value): + self._set(27, value) + + @property + def manage_roles(self): + """Returns True if a user can create or edit roles less than their role's position. + + This also corresponds to the "manage permissions" channel-specific override. + """ + return self._bit(28) + + @manage_roles.setter + def manage_roles(self, value): + self._set(28, value) + + @property + def manage_webhooks(self): + """Returns True if a user can create, edit, or delete webhooks.""" + return self._bit(29) + + @manage_webhooks.setter + def manage_webhooks(self, value): + self._set(29, value) + + @property + def manage_emojis(self): + """Returns True if a user can create, edit, or delete emojis.""" + return self._bit(30) + + @manage_emojis.setter + def manage_emojis(self, value): + self._set(30, value) + + # 1 unused + + # after these 32 bits, there's 21 more unused ones technically + +def augment_from_permissions(cls): + cls.VALID_NAMES = { name for name in dir(Permissions) if isinstance(getattr(Permissions, name), property) } + + # make descriptors for all the valid names + for name in cls.VALID_NAMES: + # god bless Python + def getter(self, x=name): + return self._values.get(x) + def setter(self, value, x=name): + self._set(x, value) + + prop = property(getter, setter) + setattr(cls, name, prop) + + return cls + +@augment_from_permissions +class PermissionOverwrite: + """A type that is used to represent a channel specific permission. + + Unlike a regular :class:`Permissions`\, the default value of a + permission is equivalent to ``None`` and not ``False``. Setting + a value to ``False`` is **explicitly** denying that permission, + while setting a value to ``True`` is **explicitly** allowing + that permission. + + The values supported by this are the same as :class:`Permissions` + with the added possibility of it being set to ``None``. + + Supported operations: + + +-----------+------------------------------------------+ + | Operation | Description | + +===========+==========================================+ + | iter(x) | Returns an iterator of (perm, value) | + | | pairs. This allows this class to be used | + | | as an iterable in e.g. set/list/dict | + | | constructions. | + +-----------+------------------------------------------+ + + Parameters + ----------- + \*\*kwargs + Set the value of permissions by their name. 
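+
+    A minimal usage sketch (``read_messages`` and ``send_messages`` are just
+    two of the valid :class:`Permissions` property names)::
+
+        overwrite = PermissionOverwrite(read_messages=True, send_messages=False)
+        allow, deny = overwrite.pair()
+        # allow.read_messages is True, deny.send_messages is True, and any
+        # permission that was never set stays None on the overwrite.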
+ """ + + def __init__(self, **kwargs): + self._values = {} + + for key, value in kwargs.items(): + setattr(self, key, value) + + def _set(self, key, value): + if value not in (True, None, False): + raise TypeError('Expected bool or NoneType, received {0.__class__.__name__}'.format(value)) + + self._values[key] = value + + def pair(self): + """Returns the (allow, deny) pair from this overwrite. + + The value of these pairs is :class:`Permissions`. + """ + + allow = Permissions.none() + deny = Permissions.none() + + for key, value in self._values.items(): + if value is True: + setattr(allow, key, True) + elif value is False: + setattr(deny, key, True) + + return allow, deny + + @classmethod + def from_pair(cls, allow, deny): + """Creates an overwrite from an allow/deny pair of :class:`Permissions`.""" + ret = cls() + for key, value in allow: + if value is True: + setattr(ret, key, True) + + for key, value in deny: + if value is True: + setattr(ret, key, False) + + return ret + + def is_empty(self): + """Checks if the permission overwrite is currently empty. + + An empty permission overwrite is one that has no overwrites set + to True or False. + """ + return all(x is None for x in self._values.values()) + + def update(self, **kwargs): + """Bulk updates this permission overwrite object. + + Allows you to set multiple attributes by using keyword + arguments. The names must be equivalent to the properties + listed. Extraneous key/value pairs will be silently ignored. + + Parameters + ------------ + \*\*kwargs + A list of key/value pairs to bulk update with. + """ + for key, value in kwargs.items(): + if key not in self.VALID_NAMES: + continue + + setattr(self, key, value) + + def __iter__(self): + for key in self.VALID_NAMES: + yield key, self._values.get(key) diff --git a/RBXLegacyDiscordBot/lib/discord/reaction.py b/RBXLegacyDiscordBot/lib/discord/reaction.py new file mode 100644 index 0000000..2e4f3ce --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/reaction.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .emoji import Emoji + +class Reaction: + """Represents a reaction to a message. + + Depending on the way this object was created, some of the attributes can + have a value of ``None``. + + Similar to members, the same reaction to a different message are equal. 
+ + Supported Operations: + + +-----------+-------------------------------------------+ + | Operation | Description | + +===========+===========================================+ + | x == y | Checks if two reactions are the same. | + +-----------+-------------------------------------------+ + | x != y | Checks if two reactions are not the same. | + +-----------+-------------------------------------------+ + | hash(x) | Return the emoji's hash. | + +-----------+-------------------------------------------+ + + Attributes + ----------- + emoji : :class:`Emoji` or str + The reaction emoji. May be a custom emoji, or a unicode emoji. + custom_emoji : bool + If this is a custom emoji. + count : int + Number of times this reaction was made + me : bool + If the user sent this reaction. + message: :class:`Message` + Message this reaction is for. + """ + __slots__ = ['message', 'count', 'emoji', 'me', 'custom_emoji'] + + def __init__(self, **kwargs): + self.message = kwargs.get('message') + self.emoji = kwargs['emoji'] + self.count = kwargs.get('count', 1) + self.me = kwargs.get('me') + self.custom_emoji = isinstance(self.emoji, Emoji) + + def __eq__(self, other): + return isinstance(other, self.__class__) and other.emoji == self.emoji + + def __ne__(self, other): + if isinstance(other, self.__class__): + return other.emoji != self.emoji + return True + + def __hash__(self): + return hash(self.emoji) diff --git a/RBXLegacyDiscordBot/lib/discord/role.py b/RBXLegacyDiscordBot/lib/discord/role.py new file mode 100644 index 0000000..c375c22 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/role.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .permissions import Permissions +from .colour import Colour +from .mixins import Hashable +from .utils import snowflake_time + +class Role(Hashable): + """Represents a Discord role in a :class:`Server`. + + Supported Operations: + + +-----------+------------------------------------------------------------------+ + | Operation | Description | + +===========+==================================================================+ + | x == y | Checks if two roles are equal. | + +-----------+------------------------------------------------------------------+ + | x != y | Checks if two roles are not equal. | + +-----------+------------------------------------------------------------------+ + | x > y | Checks if a role is higher than another in the hierarchy. 
| + +-----------+------------------------------------------------------------------+ + | x < y | Checks if a role is lower than another in the hierarchy. | + +-----------+------------------------------------------------------------------+ + | x >= y | Checks if a role is higher or equal to another in the hierarchy. | + +-----------+------------------------------------------------------------------+ + | x <= y | Checks if a role is lower or equal to another in the hierarchy. | + +-----------+------------------------------------------------------------------+ + | hash(x) | Return the role's hash. | + +-----------+------------------------------------------------------------------+ + | str(x) | Returns the role's name. | + +-----------+------------------------------------------------------------------+ + + Attributes + ---------- + id : str + The ID for the role. + name : str + The name of the role. + permissions : :class:`Permissions` + Represents the role's permissions. + server : :class:`Server` + The server the role belongs to. + colour : :class:`Colour` + Represents the role colour. An alias exists under ``color``. + hoist : bool + Indicates if the role will be displayed separately from other members. + position : int + The position of the role. This number is usually positive. The bottom + role has a position of 0. + managed : bool + Indicates if the role is managed by the server through some form of + integrations such as Twitch. + mentionable : bool + Indicates if the role can be mentioned by users. + """ + + __slots__ = ['id', 'name', 'permissions', 'color', 'colour', 'position', + 'managed', 'mentionable', 'hoist', 'server' ] + + def __init__(self, **kwargs): + self.server = kwargs.pop('server') + self._update(**kwargs) + + def __str__(self): + return self.name + + def __lt__(self, other): + if not isinstance(other, Role) or not isinstance(self, Role): + return NotImplemented + + if self.server != other.server: + raise RuntimeError('cannot compare roles from two different servers.') + + if self.position < other.position: + return True + + if self.position == other.position: + return int(self.id) > int(other.id) + + return False + + def __le__(self, other): + r = Role.__lt__(other, self) + if r is NotImplemented: + return NotImplemented + return not r + + def __gt__(self, other): + return Role.__lt__(other, self) + + def __ge__(self, other): + r = Role.__lt__(self, other) + if r is NotImplemented: + return NotImplemented + return not r + + def _update(self, **kwargs): + self.id = kwargs.get('id') + self.name = kwargs.get('name') + self.permissions = Permissions(kwargs.get('permissions', 0)) + self.position = kwargs.get('position', 0) + self.colour = Colour(kwargs.get('color', 0)) + self.hoist = kwargs.get('hoist', False) + self.managed = kwargs.get('managed', False) + self.mentionable = kwargs.get('mentionable', False) + self.color = self.colour + + @property + def is_everyone(self): + """Checks if the role is the @everyone role.""" + return self.server.id == self.id + + @property + def created_at(self): + """Returns the role's creation time in UTC.""" + return snowflake_time(self.id) + + @property + def mention(self): + """Returns a string that allows you to mention a role.""" + return '<@&{}>'.format(self.id) diff --git a/RBXLegacyDiscordBot/lib/discord/server.py b/RBXLegacyDiscordBot/lib/discord/server.py new file mode 100644 index 0000000..df6bc7d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/server.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) 
+ +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from . import utils +from .role import Role +from .member import Member +from .emoji import Emoji +from .game import Game +from .channel import Channel +from .enums import ServerRegion, Status, try_enum, VerificationLevel +from .mixins import Hashable + +class Server(Hashable): + """Represents a Discord server. + + Supported Operations: + + +-----------+--------------------------------------+ + | Operation | Description | + +===========+======================================+ + | x == y | Checks if two servers are equal. | + +-----------+--------------------------------------+ + | x != y | Checks if two servers are not equal. | + +-----------+--------------------------------------+ + | hash(x) | Returns the server's hash. | + +-----------+--------------------------------------+ + | str(x) | Returns the server's name. | + +-----------+--------------------------------------+ + + Attributes + ---------- + name : str + The server name. + me : :class:`Member` + Similar to :attr:`Client.user` except an instance of :class:`Member`. + This is essentially used to get the member version of yourself. + roles + A list of :class:`Role` that the server has available. + emojis + A list of :class:`Emoji` that the server owns. + region : :class:`ServerRegion` + The region the server belongs on. There is a chance that the region + will be a ``str`` if the value is not recognised by the enumerator. + afk_timeout : int + The timeout to get sent to the AFK channel. + afk_channel : :class:`Channel` + The channel that denotes the AFK channel. None if it doesn't exist. + members + An iterable of :class:`Member` that are currently on the server. + channels + An iterable of :class:`Channel` that are currently on the server. + icon : str + The server's icon. + id : str + The server's ID. + owner : :class:`Member` + The member who owns the server. + unavailable : bool + Indicates if the server is unavailable. If this is ``True`` then the + reliability of other attributes outside of :meth:`Server.id` is slim and they might + all be None. It is best to not do anything with the server if it is unavailable. + + Check the :func:`on_server_unavailable` and :func:`on_server_available` events. + large : bool + Indicates if the server is a 'large' server. A large server is defined as having + more than ``large_threshold`` count members, which for this library is set to + the maximum of 250. 
+ voice_client: Optional[:class:`VoiceClient`] + The VoiceClient associated with this server. A shortcut for the + :meth:`Client.voice_client_in` call. + mfa_level: int + Indicates the server's two factor authorisation level. If this value is 0 then + the server does not require 2FA for their administrative members. If the value is + 1 then they do. + verification_level: :class:`VerificationLevel` + The server's verification level. + features: List[str] + A list of features that the server has. They are currently as follows: + + - ``VIP_REGIONS``: Server has VIP voice regions + - ``VANITY_URL``: Server has a vanity invite URL (e.g. discord.gg/discord-api) + - ``INVITE_SPLASH``: Server's invite page has a special splash. + + splash: str + The server's invite splash. + """ + + __slots__ = ['afk_timeout', 'afk_channel', '_members', '_channels', 'icon', + 'name', 'id', 'owner', 'unavailable', 'name', 'region', + '_default_role', '_default_channel', 'roles', '_member_count', + 'large', 'owner_id', 'mfa_level', 'emojis', 'features', + 'verification_level', 'splash' ] + + def __init__(self, **kwargs): + self._channels = {} + self.owner = None + self._members = {} + self._from_data(kwargs) + + @property + def channels(self): + return self._channels.values() + + def get_channel(self, channel_id): + """Returns a :class:`Channel` with the given ID. If not found, returns None.""" + return self._channels.get(channel_id) + + def _add_channel(self, channel): + self._channels[channel.id] = channel + + def _remove_channel(self, channel): + self._channels.pop(channel.id, None) + + @property + def members(self): + return self._members.values() + + def get_member(self, user_id): + """Returns a :class:`Member` with the given ID. If not found, returns None.""" + return self._members.get(user_id) + + def _add_member(self, member): + self._members[member.id] = member + + def _remove_member(self, member): + self._members.pop(member.id, None) + + def __str__(self): + return self.name + + def _update_voice_state(self, data): + user_id = data.get('user_id') + member = self.get_member(user_id) + before = None + if member is not None: + before = member._copy() + ch_id = data.get('channel_id') + channel = self.get_channel(ch_id) + member._update_voice_state(voice_channel=channel, **data) + return before, member + + def _add_role(self, role): + # roles get added to the bottom (position 1, pos 0 is @everyone) + # so since self.roles has the @everyone role, we can't increment + # its position because it's stuck at position 0. Luckily x += False + # is equivalent to adding 0. So we cast the position to a bool and + # increment it. + for r in self.roles: + r.position += bool(r.position) + + self.roles.append(role) + + def _remove_role(self, role): + # this raises ValueError if it fails.. + self.roles.remove(role) + + # since it didn't, we can change the positions now + # basically the same as above except we only decrement + # the position if we're above the role we deleted. + for r in self.roles: + r.position -= r.position > role.position + + def _from_data(self, guild): + # according to Stan, this is always available even if the guild is unavailable + # I don't have this guarantee when someone updates the server. 
+ member_count = guild.get('member_count', None) + if member_count: + self._member_count = member_count + + self.name = guild.get('name') + self.region = try_enum(ServerRegion, guild.get('region')) + self.verification_level = try_enum(VerificationLevel, guild.get('verification_level')) + self.afk_timeout = guild.get('afk_timeout') + self.icon = guild.get('icon') + self.unavailable = guild.get('unavailable', False) + self.id = guild['id'] + self.roles = [Role(server=self, **r) for r in guild.get('roles', [])] + self.mfa_level = guild.get('mfa_level') + self.emojis = [Emoji(server=self, **r) for r in guild.get('emojis', [])] + self.features = guild.get('features', []) + self.splash = guild.get('splash') + + for mdata in guild.get('members', []): + roles = [self.default_role] + for role_id in mdata['roles']: + role = utils.find(lambda r: r.id == role_id, self.roles) + if role is not None: + roles.append(role) + + mdata['roles'] = roles + member = Member(**mdata) + member.server = self + self._add_member(member) + + self._sync(guild) + self.large = None if member_count is None else self._member_count >= 250 + + if 'owner_id' in guild: + self.owner_id = guild['owner_id'] + self.owner = self.get_member(self.owner_id) + + afk_id = guild.get('afk_channel_id') + self.afk_channel = self.get_channel(afk_id) + + for obj in guild.get('voice_states', []): + self._update_voice_state(obj) + + def _sync(self, data): + if 'large' in data: + self.large = data['large'] + + for presence in data.get('presences', []): + user_id = presence['user']['id'] + member = self.get_member(user_id) + if member is not None: + member.status = presence['status'] + try: + member.status = Status(member.status) + except: + pass + game = presence.get('game', {}) + member.game = Game(**game) if game else None + + if 'channels' in data: + channels = data['channels'] + for c in channels: + channel = Channel(server=self, **c) + self._add_channel(channel) + + + @utils.cached_slot_property('_default_role') + def default_role(self): + """Gets the @everyone role that all members have by default.""" + return utils.find(lambda r: r.is_everyone, self.roles) + + @utils.cached_slot_property('_default_channel') + def default_channel(self): + """Gets the default :class:`Channel` for the server.""" + return utils.find(lambda c: c.is_default, self.channels) + + @property + def icon_url(self): + """Returns the URL version of the server's icon. Returns an empty string if it has no icon.""" + if self.icon is None: + return '' + return 'https://cdn.discordapp.com/icons/{0.id}/{0.icon}.jpg'.format(self) + + @property + def splash_url(self): + """Returns the URL version of the server's invite splash. Returns an empty string if it has no splash.""" + if self.splash is None: + return '' + return 'https://cdn.discordapp.com/splashes/{0.id}/{0.splash}.jpg?size=2048'.format(self) + + @property + def member_count(self): + """Returns the true member count regardless of it being loaded fully or not.""" + return self._member_count + + @property + def created_at(self): + """Returns the server's creation time in UTC.""" + return utils.snowflake_time(self.id) + + @property + def role_hierarchy(self): + """Returns the server's roles in the order of the hierarchy. + + The first element of this list will be the highest role in the + hierarchy. + """ + return sorted(self.roles, reverse=True) + + def get_member_named(self, name): + """Returns the first member found that matches the name provided. + + The name can have an optional discriminator argument, e.g. 
"Jake#0001" + or "Jake" will both do the lookup. However the former will give a more + precise result. Note that the discriminator must have all 4 digits + for this to work. + + If a nickname is passed, then it is looked up via the nickname. Note + however, that a nickname + discriminator combo will not lookup the nickname + but rather the username + discriminator combo due to nickname + discriminator + not being unique. + + If no member is found, ``None`` is returned. + + Parameters + ----------- + name : str + The name of the member to lookup with an optional discriminator. + + Returns + -------- + :class:`Member` + The member in this server with the associated name. If not found + then ``None`` is returned. + """ + + result = None + members = self.members + if len(name) > 5 and name[-5] == '#': + # The 5 length is checking to see if #0000 is in the string, + # as a#0000 has a length of 6, the minimum for a potential + # discriminator lookup. + potential_discriminator = name[-4:] + + # do the actual lookup and return if found + # if it isn't found then we'll do a full name lookup below. + result = utils.get(members, name=name[:-5], discriminator=potential_discriminator) + if result is not None: + return result + + def pred(m): + return m.nick == name or m.name == name + + return utils.find(pred, members) diff --git a/RBXLegacyDiscordBot/lib/discord/state.py b/RBXLegacyDiscordBot/lib/discord/state.py new file mode 100644 index 0000000..87564ab --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/state.py @@ -0,0 +1,739 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .server import Server +from .user import User +from .game import Game +from .emoji import Emoji +from .reaction import Reaction +from .message import Message +from .channel import Channel, PrivateChannel +from .member import Member +from .role import Role +from . 
import utils, compat +from .enums import Status, ChannelType, try_enum +from .calls import GroupCall + +from collections import deque, namedtuple +import copy, enum, math +import datetime +import asyncio +import logging + +class ListenerType(enum.Enum): + chunk = 0 + +Listener = namedtuple('Listener', ('type', 'future', 'predicate')) +log = logging.getLogger(__name__) +ReadyState = namedtuple('ReadyState', ('launch', 'servers')) + +class ConnectionState: + def __init__(self, dispatch, chunker, syncer, max_messages, *, loop): + self.loop = loop + self.max_messages = max_messages + self.dispatch = dispatch + self.chunker = chunker + self.syncer = syncer + self.is_bot = None + self._listeners = [] + self.clear() + + def clear(self): + self.user = None + self.sequence = None + self.session_id = None + self._calls = {} + self._servers = {} + self._voice_clients = {} + self._private_channels = {} + # extra dict to look up private channels by user id + self._private_channels_by_user = {} + self.messages = deque(maxlen=self.max_messages) + + def process_listeners(self, listener_type, argument, result): + removed = [] + for i, listener in enumerate(self._listeners): + if listener.type != listener_type: + continue + + future = listener.future + if future.cancelled(): + removed.append(i) + continue + + try: + passed = listener.predicate(argument) + except Exception as e: + future.set_exception(e) + removed.append(i) + else: + if passed: + future.set_result(result) + removed.append(i) + if listener.type == ListenerType.chunk: + break + + for index in reversed(removed): + del self._listeners[index] + + @property + def voice_clients(self): + return self._voice_clients.values() + + def _get_voice_client(self, guild_id): + return self._voice_clients.get(guild_id) + + def _add_voice_client(self, guild_id, voice): + self._voice_clients[guild_id] = voice + + def _remove_voice_client(self, guild_id): + self._voice_clients.pop(guild_id, None) + + def _update_references(self, ws): + for vc in self.voice_clients: + vc.main_ws = ws + + @property + def servers(self): + return self._servers.values() + + def _get_server(self, server_id): + return self._servers.get(server_id) + + def _add_server(self, server): + self._servers[server.id] = server + + def _remove_server(self, server): + self._servers.pop(server.id, None) + + @property + def private_channels(self): + return self._private_channels.values() + + def _get_private_channel(self, channel_id): + return self._private_channels.get(channel_id) + + def _get_private_channel_by_user(self, user_id): + return self._private_channels_by_user.get(user_id) + + def _add_private_channel(self, channel): + self._private_channels[channel.id] = channel + if channel.type is ChannelType.private: + self._private_channels_by_user[channel.user.id] = channel + + def _remove_private_channel(self, channel): + self._private_channels.pop(channel.id, None) + if channel.type is ChannelType.private: + self._private_channels_by_user.pop(channel.user.id, None) + + def _get_message(self, msg_id): + return utils.find(lambda m: m.id == msg_id, self.messages) + + def _add_server_from_data(self, guild): + server = Server(**guild) + Server.me = property(lambda s: s.get_member(self.user.id)) + Server.voice_client = property(lambda s: self._get_voice_client(s.id)) + self._add_server(server) + return server + + def chunks_needed(self, server): + for chunk in range(math.ceil(server._member_count / 1000)): + yield self.receive_chunk(server.id) + + @asyncio.coroutine + def _delay_ready(self): + launch = 
self._ready_state.launch + while not launch.is_set(): + # this snippet of code is basically waiting 2 seconds + # until the last GUILD_CREATE was sent + launch.set() + yield from asyncio.sleep(2, loop=self.loop) + + servers = self._ready_state.servers + + # get all the chunks + chunks = [] + for server in servers: + chunks.extend(self.chunks_needed(server)) + + # we only want to request ~75 guilds per chunk request. + splits = [servers[i:i + 75] for i in range(0, len(servers), 75)] + for split in splits: + yield from self.chunker(split) + + # wait for the chunks + if chunks: + try: + yield from asyncio.wait(chunks, timeout=len(chunks) * 30.0, loop=self.loop) + except asyncio.TimeoutError: + log.info('Somehow timed out waiting for chunks.') + + # remove the state + try: + del self._ready_state + except AttributeError: + pass # already been deleted somehow + + # call GUILD_SYNC after we're done chunking + if not self.is_bot: + log.info('Requesting GUILD_SYNC for %s guilds' % len(self.servers)) + yield from self.syncer([s.id for s in self.servers]) + + # dispatch the event + self.dispatch('ready') + + def parse_ready(self, data): + self._ready_state = ReadyState(launch=asyncio.Event(), servers=[]) + self.user = User(**data['user']) + guilds = data.get('guilds') + + servers = self._ready_state.servers + for guild in guilds: + server = self._add_server_from_data(guild) + if (not self.is_bot and not server.unavailable) or server.large: + servers.append(server) + + for pm in data.get('private_channels'): + self._add_private_channel(PrivateChannel(self.user, **pm)) + + compat.create_task(self._delay_ready(), loop=self.loop) + + def parse_resumed(self, data): + self.dispatch('resumed') + + def parse_message_create(self, data): + channel = self.get_channel(data.get('channel_id')) + message = self._create_message(channel=channel, **data) + self.dispatch('message', message) + self.messages.append(message) + + def parse_message_delete(self, data): + message_id = data.get('id') + found = self._get_message(message_id) + if found is not None: + self.dispatch('message_delete', found) + self.messages.remove(found) + + def parse_message_delete_bulk(self, data): + message_ids = set(data.get('ids', [])) + to_be_deleted = list(filter(lambda m: m.id in message_ids, self.messages)) + for msg in to_be_deleted: + self.dispatch('message_delete', msg) + self.messages.remove(msg) + + def parse_message_update(self, data): + message = self._get_message(data.get('id')) + if message is not None: + older_message = copy.copy(message) + if 'call' in data: + # call state message edit + message._handle_call(data['call']) + elif 'content' not in data: + # embed only edit + message.embeds = data['embeds'] + else: + message._update(channel=message.channel, **data) + + self.dispatch('message_edit', older_message, message) + + def parse_message_reaction_add(self, data): + message = self._get_message(data['message_id']) + if message is not None: + emoji = self._get_reaction_emoji(**data.pop('emoji')) + reaction = utils.get(message.reactions, emoji=emoji) + + is_me = data['user_id'] == self.user.id + + if not reaction: + reaction = Reaction( + message=message, emoji=emoji, me=is_me, **data) + message.reactions.append(reaction) + else: + reaction.count += 1 + if is_me: + reaction.me = True + + channel = self.get_channel(data['channel_id']) + member = self._get_member(channel, data['user_id']) + + self.dispatch('reaction_add', reaction, member) + + def parse_message_reaction_remove_all(self, data): + message = 
self._get_message(data['message_id']) + if message is not None: + old_reactions = message.reactions.copy() + message.reactions.clear() + self.dispatch('reaction_clear', message, old_reactions) + + def parse_message_reaction_remove(self, data): + message = self._get_message(data['message_id']) + if message is not None: + emoji = self._get_reaction_emoji(**data['emoji']) + reaction = utils.get(message.reactions, emoji=emoji) + + # Eventual consistency means we can get out of order or duplicate removes. + if not reaction: + log.warning("Unexpected reaction remove {}".format(data)) + return + + reaction.count -= 1 + if data['user_id'] == self.user.id: + reaction.me = False + if reaction.count == 0: + message.reactions.remove(reaction) + + channel = self.get_channel(data['channel_id']) + member = self._get_member(channel, data['user_id']) + + self.dispatch('reaction_remove', reaction, member) + + def parse_presence_update(self, data): + server = self._get_server(data.get('guild_id')) + if server is None: + return + + status = data.get('status') + user = data['user'] + member_id = user['id'] + member = server.get_member(member_id) + if member is None: + if 'username' not in user: + # sometimes we receive 'incomplete' member data post-removal. + # skip these useless cases. + return + + member = self._make_member(server, data) + server._add_member(member) + + old_member = member._copy() + member.status = data.get('status') + try: + member.status = Status(member.status) + except: + pass + + game = data.get('game', {}) + member.game = Game(**game) if game else None + member.name = user.get('username', member.name) + member.avatar = user.get('avatar', member.avatar) + member.discriminator = user.get('discriminator', member.discriminator) + + self.dispatch('member_update', old_member, member) + + def parse_user_update(self, data): + self.user = User(**data) + + def parse_channel_delete(self, data): + server = self._get_server(data.get('guild_id')) + if server is not None: + channel_id = data.get('id') + channel = server.get_channel(channel_id) + if channel is not None: + server._remove_channel(channel) + self.dispatch('channel_delete', channel) + + def parse_channel_update(self, data): + channel_type = try_enum(ChannelType, data.get('type')) + channel_id = data.get('id') + if channel_type is ChannelType.group: + channel = self._get_private_channel(channel_id) + old_channel = copy.copy(channel) + channel._update_group(**data) + self.dispatch('channel_update', old_channel, channel) + return + + server = self._get_server(data.get('guild_id')) + if server is not None: + channel = server.get_channel(channel_id) + if channel is not None: + old_channel = copy.copy(channel) + channel._update(server=server, **data) + self.dispatch('channel_update', old_channel, channel) + + def parse_channel_create(self, data): + ch_type = try_enum(ChannelType, data.get('type')) + channel = None + if ch_type in (ChannelType.group, ChannelType.private): + channel = PrivateChannel(self.user, **data) + self._add_private_channel(channel) + else: + server = self._get_server(data.get('guild_id')) + if server is not None: + channel = Channel(server=server, **data) + server._add_channel(channel) + + self.dispatch('channel_create', channel) + + def parse_channel_recipient_add(self, data): + channel = self._get_private_channel(data.get('channel_id')) + user = User(**data.get('user', {})) + channel.recipients.append(user) + self.dispatch('group_join', channel, user) + + def parse_channel_recipient_remove(self, data): + channel = 
self._get_private_channel(data.get('channel_id')) + user = User(**data.get('user', {})) + try: + channel.recipients.remove(user) + except ValueError: + pass + else: + self.dispatch('group_remove', channel, user) + + def _make_member(self, server, data): + roles = [server.default_role] + for roleid in data.get('roles', []): + role = utils.get(server.roles, id=roleid) + if role is not None: + roles.append(role) + + data['roles'] = sorted(roles, key=lambda r: int(r.id)) + return Member(server=server, **data) + + def parse_guild_member_add(self, data): + server = self._get_server(data.get('guild_id')) + member = self._make_member(server, data) + server._add_member(member) + server._member_count += 1 + self.dispatch('member_join', member) + + def parse_guild_member_remove(self, data): + server = self._get_server(data.get('guild_id')) + if server is not None: + user_id = data['user']['id'] + member = server.get_member(user_id) + if member is not None: + server._remove_member(member) + server._member_count -= 1 + + # remove them from the voice channel member list + vc = member.voice_channel + if vc is not None: + try: + vc.voice_members.remove(member) + except: + pass + + self.dispatch('member_remove', member) + + def parse_guild_member_update(self, data): + server = self._get_server(data.get('guild_id')) + user_id = data['user']['id'] + member = server.get_member(user_id) + if member is not None: + user = data['user'] + old_member = member._copy() + member.name = user['username'] + member.discriminator = user['discriminator'] + member.avatar = user['avatar'] + member.bot = user.get('bot', False) + + # the nickname change is optional, + # if it isn't in the payload then it didn't change + if 'nick' in data: + member.nick = data['nick'] + + # update the roles + member.roles = [server.default_role] + for role in server.roles: + if role.id in data['roles']: + member.roles.append(role) + + # sort the roles by ID since they can be "randomised" + member.roles.sort(key=lambda r: int(r.id)) + self.dispatch('member_update', old_member, member) + + def parse_guild_emojis_update(self, data): + server = self._get_server(data.get('guild_id')) + before_emojis = server.emojis + server.emojis = [Emoji(server=server, **e) for e in data.get('emojis', [])] + self.dispatch('server_emojis_update', before_emojis, server.emojis) + + def _get_create_server(self, data): + if data.get('unavailable') == False: + # GUILD_CREATE with unavailable in the response + # usually means that the server has become available + # and is therefore in the cache + server = self._get_server(data.get('id')) + if server is not None: + server.unavailable = False + server._from_data(data) + return server + + return self._add_server_from_data(data) + + @asyncio.coroutine + def _chunk_and_dispatch(self, server, unavailable): + chunks = list(self.chunks_needed(server)) + yield from self.chunker(server) + if chunks: + try: + yield from asyncio.wait(chunks, timeout=len(chunks), loop=self.loop) + except asyncio.TimeoutError: + log.info('Somehow timed out waiting for chunks.') + + if unavailable == False: + self.dispatch('server_available', server) + else: + self.dispatch('server_join', server) + + def parse_guild_create(self, data): + unavailable = data.get('unavailable') + if unavailable == True: + # joined a server with unavailable == True so.. 
+ return + + server = self._get_create_server(data) + + # check if it requires chunking + if server.large: + if unavailable == False: + # check if we're waiting for 'useful' READY + # and if we are, we don't want to dispatch any + # event such as server_join or server_available + # because we're still in the 'READY' phase. Or + # so we say. + try: + state = self._ready_state + state.launch.clear() + state.servers.append(server) + except AttributeError: + # the _ready_state attribute is only there during + # processing of useful READY. + pass + else: + return + + # since we're not waiting for 'useful' READY we'll just + # do the chunk request here + compat.create_task(self._chunk_and_dispatch(server, unavailable), loop=self.loop) + return + + # Dispatch available if newly available + if unavailable == False: + self.dispatch('server_available', server) + else: + self.dispatch('server_join', server) + + def parse_guild_sync(self, data): + server = self._get_server(data.get('id')) + server._sync(data) + + def parse_guild_update(self, data): + server = self._get_server(data.get('id')) + if server is not None: + old_server = copy.copy(server) + server._from_data(data) + self.dispatch('server_update', old_server, server) + + def parse_guild_delete(self, data): + server = self._get_server(data.get('id')) + if server is None: + return + + if data.get('unavailable', False) and server is not None: + # GUILD_DELETE with unavailable being True means that the + # server that was available is now currently unavailable + server.unavailable = True + self.dispatch('server_unavailable', server) + return + + # do a cleanup of the messages cache + self.messages = deque((msg for msg in self.messages if msg.server != server), maxlen=self.max_messages) + + self._remove_server(server) + self.dispatch('server_remove', server) + + def parse_guild_ban_add(self, data): + # we make the assumption that GUILD_BAN_ADD is done + # before GUILD_MEMBER_REMOVE is called + # hence we don't remove it from cache or do anything + # strange with it, the main purpose of this event + # is mainly to dispatch to another event worth listening to for logging + server = self._get_server(data.get('guild_id')) + if server is not None: + user_id = data.get('user', {}).get('id') + member = utils.get(server.members, id=user_id) + if member is not None: + self.dispatch('member_ban', member) + + def parse_guild_ban_remove(self, data): + server = self._get_server(data.get('guild_id')) + if server is not None: + if 'user' in data: + user = User(**data['user']) + self.dispatch('member_unban', server, user) + + def parse_guild_role_create(self, data): + server = self._get_server(data.get('guild_id')) + role_data = data.get('role', {}) + role = Role(server=server, **role_data) + server._add_role(role) + self.dispatch('server_role_create', role) + + def parse_guild_role_delete(self, data): + server = self._get_server(data.get('guild_id')) + if server is not None: + role_id = data.get('role_id') + role = utils.find(lambda r: r.id == role_id, server.roles) + try: + server._remove_role(role) + except ValueError: + return + else: + self.dispatch('server_role_delete', role) + + def parse_guild_role_update(self, data): + server = self._get_server(data.get('guild_id')) + if server is not None: + role_id = data['role']['id'] + role = utils.find(lambda r: r.id == role_id, server.roles) + if role is not None: + old_role = copy.copy(role) + role._update(**data['role']) + self.dispatch('server_role_update', old_role, role) + + def parse_guild_members_chunk(self, 
data): + server = self._get_server(data.get('guild_id')) + members = data.get('members', []) + for member in members: + m = self._make_member(server, member) + existing = server.get_member(m.id) + if existing is None or existing.joined_at is None: + server._add_member(m) + + # if the owner is offline, server.owner is potentially None + # therefore we should check if this chunk makes it point to a valid + # member. + server.owner = server.get_member(server.owner_id) + log.info('processed a chunk for {} members.'.format(len(members))) + self.process_listeners(ListenerType.chunk, server, len(members)) + + def parse_voice_state_update(self, data): + server = self._get_server(data.get('guild_id')) + if server is not None: + channel = server.get_channel(data.get('channel_id')) + if data.get('user_id') == self.user.id: + voice = self._get_voice_client(server.id) + if voice is not None: + voice.channel = channel + + before, after = server._update_voice_state(data) + if after is not None: + self.dispatch('voice_state_update', before, after) + else: + # in here we're either at private or group calls + call = self._calls.get(data.get('channel_id'), None) + if call is not None: + call._update_voice_state(data) + + def parse_typing_start(self, data): + channel = self.get_channel(data.get('channel_id')) + if channel is not None: + member = None + user_id = data.get('user_id') + is_private = getattr(channel, 'is_private', None) + if is_private == None: + return + + if is_private: + member = channel.user + else: + member = channel.server.get_member(user_id) + + if member is not None: + timestamp = datetime.datetime.utcfromtimestamp(data.get('timestamp')) + self.dispatch('typing', channel, member, timestamp) + + def parse_call_create(self, data): + message = self._get_message(data.get('message_id')) + if message is not None: + call = GroupCall(call=message, **data) + self._calls[data['channel_id']] = call + self.dispatch('call', call) + + def parse_call_update(self, data): + call = self._calls.get(data.get('channel_id'), None) + if call is not None: + before = copy.copy(call) + call._update(**data) + self.dispatch('call_update', before, call) + + def parse_call_delete(self, data): + call = self._calls.pop(data.get('channel_id'), None) + if call is not None: + self.dispatch('call_remove', call) + + def _get_member(self, channel, id): + if channel.is_private: + if id == self.user.id: + return self.user + return utils.get(channel.recipients, id=id) + else: + return channel.server.get_member(id) + + def _create_message(self, **message): + """Helper mostly for injecting reactions.""" + reactions = [ + self._create_reaction(**r) for r in message.pop('reactions', []) + ] + return Message(channel=message.pop('channel'), + reactions=reactions, **message) + + def _create_reaction(self, **reaction): + emoji = self._get_reaction_emoji(**reaction.pop('emoji')) + return Reaction(emoji=emoji, **reaction) + + def _get_reaction_emoji(self, **data): + id = data['id'] + + if not id: + return data['name'] + + for server in self.servers: + for emoji in server.emojis: + if emoji.id == id: + return emoji + return Emoji(server=None, **data) + + def get_channel(self, id): + if id is None: + return None + + for server in self.servers: + channel = server.get_channel(id) + if channel is not None: + return channel + + pm = self._get_private_channel(id) + if pm is not None: + return pm + + def receive_chunk(self, guild_id): + future = asyncio.Future(loop=self.loop) + listener = Listener(ListenerType.chunk, future, lambda s: s.id == 
guild_id) + self._listeners.append(listener) + return future diff --git a/RBXLegacyDiscordBot/lib/discord/user.py b/RBXLegacyDiscordBot/lib/discord/user.py new file mode 100644 index 0000000..c51955b --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/user.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +from .utils import snowflake_time +from .enums import DefaultAvatar + +class User: + """Represents a Discord user. + + Supported Operations: + + +-----------+---------------------------------------------+ + | Operation | Description | + +===========+=============================================+ + | x == y | Checks if two users are equal. | + +-----------+---------------------------------------------+ + | x != y | Checks if two users are not equal. | + +-----------+---------------------------------------------+ + | hash(x) | Return the user's hash. | + +-----------+---------------------------------------------+ + | str(x) | Returns the user's name with discriminator. | + +-----------+---------------------------------------------+ + + Attributes + ----------- + name : str + The user's username. + id : str + The user's unique ID. + discriminator : str or int + The user's discriminator. This is given when the username has conflicts. + avatar : str + The avatar hash the user has. Could be None. + bot : bool + Specifies if the user is a bot account. + """ + + __slots__ = ['name', 'id', 'discriminator', 'avatar', 'bot'] + + def __init__(self, **kwargs): + self.name = kwargs.get('username') + self.id = kwargs.get('id') + self.discriminator = kwargs.get('discriminator') + self.avatar = kwargs.get('avatar') + self.bot = kwargs.get('bot', False) + + def __str__(self): + return '{0.name}#{0.discriminator}'.format(self) + + def __eq__(self, other): + return isinstance(other, User) and other.id == self.id + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self.id) + + @property + def avatar_url(self): + """Returns a friendly URL version of the avatar variable the user has. An empty string if + the user has no avatar.""" + if self.avatar is None: + return '' + + url = 'https://cdn.discordapp.com/avatars/{0.id}/{0.avatar}.{1}?size=1024' + if self.avatar.startswith('a_'): + return url.format(self, 'gif') + else: + return url.format(self, 'webp') + + @property + def default_avatar(self): + """Returns the default avatar for a given user. 
This is calculated by the user's descriminator""" + return DefaultAvatar(int(self.discriminator) % len(DefaultAvatar)) + + @property + def default_avatar_url(self): + """Returns a URL for a user's default avatar.""" + return 'https://cdn.discordapp.com/embed/avatars/{}.png'.format(self.default_avatar.value) + + @property + def mention(self): + """Returns a string that allows you to mention the given user.""" + return '<@{0.id}>'.format(self) + + def permissions_in(self, channel): + """An alias for :meth:`Channel.permissions_for`. + + Basically equivalent to: + + .. code-block:: python + + channel.permissions_for(self) + + Parameters + ----------- + channel + The channel to check your permissions for. + """ + return channel.permissions_for(self) + + @property + def created_at(self): + """Returns the user's creation time in UTC. + + This is when the user's discord account was created.""" + return snowflake_time(self.id) + + @property + def display_name(self): + """Returns the user's display name. + + For regular users this is just their username, but + if they have a server specific nickname then that + is returned instead. + """ + return getattr(self, 'nick', None) or self.name + + def mentioned_in(self, message): + """Checks if the user is mentioned in the specified message. + + Parameters + ----------- + message : :class:`Message` + The message to check if you're mentioned in. + """ + + if message.mention_everyone: + return True + + if self in message.mentions: + return True + + return False diff --git a/RBXLegacyDiscordBot/lib/discord/utils.py b/RBXLegacyDiscordBot/lib/discord/utils.py new file mode 100644 index 0000000..8312965 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/utils.py @@ -0,0 +1,253 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+""" + +from re import split as re_split +from .errors import HTTPException, Forbidden, NotFound, InvalidArgument +import datetime +from base64 import b64encode +import asyncio +import json +import warnings, functools + +DISCORD_EPOCH = 1420070400000 + +class cached_property: + def __init__(self, function): + self.function = function + self.__doc__ = getattr(function, '__doc__') + + def __get__(self, instance, owner): + if instance is None: + return self + + value = self.function(instance) + setattr(instance, self.function.__name__, value) + + return value + +class CachedSlotProperty: + def __init__(self, name, function): + self.name = name + self.function = function + self.__doc__ = getattr(function, '__doc__') + + def __get__(self, instance, owner): + if instance is None: + return self + + try: + return getattr(instance, self.name) + except AttributeError: + value = self.function(instance) + setattr(instance, self.name, value) + return value + +def cached_slot_property(name): + def decorator(func): + return CachedSlotProperty(name, func) + return decorator + +def parse_time(timestamp): + if timestamp: + return datetime.datetime(*map(int, re_split(r'[^\d]', timestamp.replace('+00:00', '')))) + return None + +def deprecated(instead=None): + def actual_decorator(func): + @functools.wraps(func) + def decorated(*args, **kwargs): + warnings.simplefilter('always', DeprecationWarning) # turn off filter + if instead: + fmt = "{0.__name__} is deprecated, use {1} instead." + else: + fmt = '{0.__name__} is deprecated.' + + warnings.warn(fmt.format(func, instead), stacklevel=3, category=DeprecationWarning) + warnings.simplefilter('default', DeprecationWarning) # reset filter + return func(*args, **kwargs) + return decorated + return actual_decorator + +def oauth_url(client_id, permissions=None, server=None, redirect_uri=None): + """A helper function that returns the OAuth2 URL for inviting the bot + into servers. + + Parameters + ----------- + client_id : str + The client ID for your bot. + permissions : :class:`Permissions` + The permissions you're requesting. If not given then you won't be requesting any + permissions. + server : :class:`Server` + The server to pre-select in the authorization screen, if available. + redirect_uri : str + An optional valid redirect URI. + """ + url = 'https://discordapp.com/oauth2/authorize?client_id={}&scope=bot'.format(client_id) + if permissions is not None: + url = url + '&permissions=' + str(permissions.value) + if server is not None: + url = url + "&guild_id=" + server.id + if redirect_uri is not None: + from urllib.parse import urlencode + url = url + "&response_type=code&" + urlencode({'redirect_uri': redirect_uri}) + return url + + +def snowflake_time(id): + """Returns the creation date in UTC of a discord id.""" + return datetime.datetime.utcfromtimestamp(((int(id) >> 22) + DISCORD_EPOCH) / 1000) + +def time_snowflake(datetime_obj, high=False): + """Returns a numeric snowflake pretending to be created at the given date. + + When using as the lower end of a range, use time_snowflake(high=False) - 1 to be inclusive, high=True to be exclusive + When using as the higher end of a range, use time_snowflake(high=True) + 1 to be inclusive, high=False to be exclusive + + Parameters + ----------- + datetime_obj + A timezone-naive datetime object representing UTC time. + high + Whether or not to set the lower 22 bit to high or low. 
+ """ + unix_seconds = (datetime_obj - type(datetime_obj)(1970, 1, 1)).total_seconds() + discord_millis = int(unix_seconds * 1000 - DISCORD_EPOCH) + + return (discord_millis << 22) + (2**22-1 if high else 0) + +def find(predicate, seq): + """A helper to return the first element found in the sequence + that meets the predicate. For example: :: + + member = find(lambda m: m.name == 'Mighty', channel.server.members) + + would find the first :class:`Member` whose name is 'Mighty' and return it. + If an entry is not found, then ``None`` is returned. + + This is different from `filter`_ due to the fact it stops the moment it finds + a valid entry. + + + .. _filter: https://docs.python.org/3.6/library/functions.html#filter + + Parameters + ----------- + predicate + A function that returns a boolean-like result. + seq : iterable + The iterable to search through. + """ + + for element in seq: + if predicate(element): + return element + return None + +def get(iterable, **attrs): + """A helper that returns the first element in the iterable that meets + all the traits passed in ``attrs``. This is an alternative for + :func:`discord.utils.find`. + + When multiple attributes are specified, they are checked using + logical AND, not logical OR. Meaning they have to meet every + attribute passed in and not one of them. + + To have a nested attribute search (i.e. search by ``x.y``) then + pass in ``x__y`` as the keyword argument. + + If nothing is found that matches the attributes passed, then + ``None`` is returned. + + Examples + --------- + + Basic usage: + + .. code-block:: python + + member = discord.utils.get(message.server.members, name='Foo') + + Multiple attribute matching: + + .. code-block:: python + + channel = discord.utils.get(server.channels, name='Foo', type=ChannelType.voice) + + Nested attribute matching: + + .. code-block:: python + + channel = discord.utils.get(client.get_all_channels(), server__name='Cool', name='general') + + Parameters + ----------- + iterable + An iterable to search through. + \*\*attrs + Keyword arguments that denote attributes to search with. 
+ """ + + def predicate(elem): + for attr, val in attrs.items(): + nested = attr.split('__') + obj = elem + for attribute in nested: + obj = getattr(obj, attribute) + + if obj != val: + return False + return True + + return find(predicate, iterable) + + +def _unique(iterable): + seen = set() + adder = seen.add + return [x for x in iterable if not (x in seen or adder(x))] + +def _null_event(*args, **kwargs): + pass + +def _get_mime_type_for_image(data): + if data.startswith(b'\x89\x50\x4E\x47\x0D\x0A\x1A\x0A'): + return 'image/png' + elif data.startswith(b'\xFF\xD8') and data.endswith(b'\xFF\xD9'): + return 'image/jpeg' + else: + raise InvalidArgument('Unsupported image type given') + +def _bytes_to_base64_data(data): + fmt = 'data:{mime};base64,{data}' + mime = _get_mime_type_for_image(data) + b64 = b64encode(data).decode('ascii') + return fmt.format(mime=mime, data=b64) + +def to_json(obj): + return json.dumps(obj, separators=(',', ':'), ensure_ascii=True) + diff --git a/RBXLegacyDiscordBot/lib/discord/voice_client.py b/RBXLegacyDiscordBot/lib/discord/voice_client.py new file mode 100644 index 0000000..647e340 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/discord/voice_client.py @@ -0,0 +1,695 @@ +# -*- coding: utf-8 -*- + +""" +The MIT License (MIT) + +Copyright (c) 2015-2016 Rapptz + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +""" + +"""Some documentation to refer to: + +- Our main web socket (mWS) sends opcode 4 with a server ID and channel ID. +- The mWS receives VOICE_STATE_UPDATE and VOICE_SERVER_UPDATE. +- We pull the session_id from VOICE_STATE_UPDATE. +- We pull the token, endpoint and guild_id from VOICE_SERVER_UPDATE. +- Then we initiate the voice web socket (vWS) pointing to the endpoint. +- We send opcode 0 with the user_id, guild_id, session_id and token using the vWS. +- The vWS sends back opcode 2 with an ssrc, port, modes(array) and hearbeat_interval. +- We send a UDP discovery packet to endpoint:port and receive our IP and our port in LE. +- Then we send our IP and port via vWS with opcode 1. +- When that's all done, we receive opcode 4 from the vWS. +- Finally we can transmit data to endpoint:port. +""" + +import asyncio +import websockets +import socket +import json, time +import logging +import struct +import threading +import subprocess +import shlex +import functools +import datetime +import audioop +import inspect + +log = logging.getLogger(__name__) + +try: + import nacl.secret + has_nacl = True +except ImportError: + has_nacl = False + +from . 
import utils, opus +from .gateway import * +from .errors import ClientException, InvalidArgument, ConnectionClosed + +class StreamPlayer(threading.Thread): + def __init__(self, stream, encoder, connected, player, after, **kwargs): + threading.Thread.__init__(self, **kwargs) + self.daemon = True + self.buff = stream + self.frame_size = encoder.frame_size + self.player = player + self._end = threading.Event() + self._resumed = threading.Event() + self._resumed.set() # we are not paused + self._connected = connected + self.after = after + self.delay = encoder.frame_length / 1000.0 + self._volume = 1.0 + self._current_error = None + + if after is not None and not callable(after): + raise TypeError('Expected a callable for the "after" parameter.') + + def _do_run(self): + self.loops = 0 + self._start = time.time() + while not self._end.is_set(): + # are we paused? + if not self._resumed.is_set(): + # wait until we aren't + self._resumed.wait() + + if not self._connected.is_set(): + self.stop() + break + + self.loops += 1 + data = self.buff.read(self.frame_size) + + if self._volume != 1.0: + data = audioop.mul(data, 2, min(self._volume, 2.0)) + + if len(data) != self.frame_size: + self.stop() + break + + self.player(data) + next_time = self._start + self.delay * self.loops + delay = max(0, self.delay + (next_time - time.time())) + time.sleep(delay) + + def run(self): + try: + self._do_run() + except Exception as e: + self._current_error = e + self.stop() + finally: + self._call_after() + + def _call_after(self): + if self.after is not None: + try: + arg_count = len(inspect.signature(self.after).parameters) + except: + # if this ended up happening, a mistake was made. + arg_count = 0 + + try: + if arg_count == 0: + self.after() + else: + self.after(self) + except: + pass + + def stop(self): + self._end.set() + + @property + def error(self): + return self._current_error + + @property + def volume(self): + return self._volume + + @volume.setter + def volume(self, value): + self._volume = max(value, 0.0) + + def pause(self): + self._resumed.clear() + + def resume(self): + self.loops = 0 + self._start = time.time() + self._resumed.set() + + def is_playing(self): + return self._resumed.is_set() and not self.is_done() + + def is_done(self): + return not self._connected.is_set() or self._end.is_set() + +class ProcessPlayer(StreamPlayer): + def __init__(self, process, client, after, **kwargs): + super().__init__(process.stdout, client.encoder, + client._connected, client.play_audio, after, **kwargs) + self.process = process + + def run(self): + super().run() + + self.process.kill() + if self.process.poll() is None: + self.process.communicate() + + +class VoiceClient: + """Represents a Discord voice connection. + + This client is created solely through :meth:`Client.join_voice_channel` + and its only purpose is to transmit voice. + + Warning + -------- + In order to play audio, you must have loaded the opus library + through :func:`opus.load_opus`. + + If you don't do this then the library will not be able to + transmit audio. + + Attributes + ----------- + session_id : str + The voice connection session ID. + token : str + The voice connection token. + user : :class:`User` + The user connected to voice. + endpoint : str + The endpoint we are connecting to. + channel : :class:`Channel` + The voice channel connected to. + server : :class:`Server` + The server the voice channel is connected to. + Shorthand for ``channel.server``. + loop + The event loop that the voice client is running on. 
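As the warning above notes, audio cannot be transmitted until the opus library has been loaded through :func:`opus.load_opus`. A minimal sketch from a user-level script; the library name passed in is platform dependent and only an example:

.. code-block:: python

    import discord

    if not discord.opus.is_loaded():
        # the argument is the opus shared library name/path on this machine,
        # e.g. 'libopus.so.0' on many Linux systems or a DLL path on Windows
        discord.opus.load_opus('libopus.so.0')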
+ """ + def __init__(self, user, main_ws, session_id, channel, data, loop): + if not has_nacl: + raise RuntimeError("PyNaCl library needed in order to use voice") + + self.user = user + self.main_ws = main_ws + self.channel = channel + self.session_id = session_id + self.loop = loop + self._connected = asyncio.Event(loop=self.loop) + self.token = data.get('token') + self.guild_id = data.get('guild_id') + self.endpoint = data.get('endpoint') + self.sequence = 0 + self.timestamp = 0 + self.encoder = opus.Encoder(48000, 2) + log.info('created opus encoder with {0.__dict__}'.format(self.encoder)) + + warn_nacl = not has_nacl + + @property + def server(self): + return self.channel.server + + def checked_add(self, attr, value, limit): + val = getattr(self, attr) + if val + value > limit: + setattr(self, attr, 0) + else: + setattr(self, attr, val + value) + + # connection related + + @asyncio.coroutine + def connect(self): + log.info('voice connection is connecting...') + self.endpoint = self.endpoint.replace(':80', '') + self.endpoint_ip = socket.gethostbyname(self.endpoint) + self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + self.socket.setblocking(False) + + log.info('Voice endpoint found {0.endpoint} (IP: {0.endpoint_ip})'.format(self)) + + self.ws = yield from DiscordVoiceWebSocket.from_client(self) + while not self._connected.is_set(): + yield from self.ws.poll_event() + if hasattr(self, 'secret_key'): + # we have a secret key, so we don't need to poll + # websocket events anymore + self._connected.set() + break + + self.loop.create_task(self.poll_voice_ws()) + + @asyncio.coroutine + def poll_voice_ws(self): + """|coro| + Reads from the voice websocket while connected. + """ + while self._connected.is_set(): + try: + yield from self.ws.poll_event() + except ConnectionClosed as e: + if e.code == 1000: + break + else: + raise + + @asyncio.coroutine + def disconnect(self): + """|coro| + + Disconnects all connections to the voice client. + + In order to reconnect, you must create another voice client + using :meth:`Client.join_voice_channel`. + """ + if not self._connected.is_set(): + return + + self._connected.clear() + try: + yield from self.ws.close() + yield from self.main_ws.voice_state(self.guild_id, None, self_mute=True) + finally: + self.socket.close() + + @asyncio.coroutine + def move_to(self, channel): + """|coro| + + Moves you to a different voice channel. + + .. warning:: + + :class:`Object` instances do not work with this function. + + Parameters + ----------- + channel : :class:`Channel` + The channel to move to. Must be a voice channel. + + Raises + ------- + InvalidArgument + Not a voice channel. 
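A hypothetical usage sketch of the connection methods above, run inside a coroutine; ``channel`` and ``other_channel`` are assumed to be voice :class:`Channel` objects:

.. code-block:: python

    voice = await client.join_voice_channel(channel)
    if voice.is_connected():
        await voice.move_to(other_channel)  # must be a voice channel
        await voice.disconnect()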
+ """ + + if str(getattr(channel, 'type', 'text')) != 'voice': + raise InvalidArgument('Must be a voice channel.') + + yield from self.main_ws.voice_state(self.guild_id, channel.id) + + def is_connected(self): + """bool : Indicates if the voice client is connected to voice.""" + return self._connected.is_set() + + # audio related + + def _get_voice_packet(self, data): + header = bytearray(12) + nonce = bytearray(24) + box = nacl.secret.SecretBox(bytes(self.secret_key)) + + # Formulate header + header[0] = 0x80 + header[1] = 0x78 + struct.pack_into('>H', header, 2, self.sequence) + struct.pack_into('>I', header, 4, self.timestamp) + struct.pack_into('>I', header, 8, self.ssrc) + + # Copy header to nonce's first 12 bytes + nonce[:12] = header + + # Encrypt and return the data + return header + box.encrypt(bytes(data), bytes(nonce)).ciphertext + + def create_ffmpeg_player(self, filename, *, use_avconv=False, pipe=False, stderr=None, options=None, before_options=None, headers=None, after=None): + """Creates a stream player for ffmpeg that launches in a separate thread to play + audio. + + The ffmpeg player launches a subprocess of ``ffmpeg`` to a specific + filename and then plays that file. + + You must have the ffmpeg or avconv executable in your path environment variable + in order for this to work. + + The operations that can be done on the player are the same as those in + :meth:`create_stream_player`. + + Examples + ---------- + + Basic usage: :: + + voice = await client.join_voice_channel(channel) + player = voice.create_ffmpeg_player('cool.mp3') + player.start() + + Parameters + ----------- + filename + The filename that ffmpeg will take and convert to PCM bytes. + If ``pipe`` is True then this is a file-like object that is + passed to the stdin of ``ffmpeg``. + use_avconv: bool + Use ``avconv`` instead of ``ffmpeg``. + pipe : bool + If true, denotes that ``filename`` parameter will be passed + to the stdin of ffmpeg. + stderr + A file-like object or ``subprocess.PIPE`` to pass to the Popen + constructor. + options : str + Extra command line flags to pass to ``ffmpeg`` after the ``-i`` flag. + before_options : str + Command line flags to pass to ``ffmpeg`` before the ``-i`` flag. + headers: dict + HTTP headers dictionary to pass to ``-headers`` command line option + after : callable + The finalizer that is called after the stream is done being + played. All exceptions the finalizer throws are silently discarded. + + Raises + ------- + ClientException + Popen failed to due to an error in ``ffmpeg`` or ``avconv``. + + Returns + -------- + StreamPlayer + A stream player with specific operations. + See :meth:`create_stream_player`. 
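As a further hypothetical example of the parameters documented above, streaming a remote file with extra ffmpeg flags and a finalizer; the URL and flags are illustrative only:

.. code-block:: python

    player = voice.create_ffmpeg_player(
        'https://example.com/stream.mp3',
        before_options='-reconnect 1 -reconnect_streamed 1',
        options='-vn',
        after=lambda: print('player finished'))
    player.start()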
+ """ + command = 'ffmpeg' if not use_avconv else 'avconv' + input_name = '-' if pipe else shlex.quote(filename) + before_args = "" + if isinstance(headers, dict): + for key, value in headers.items(): + before_args += "{}: {}\r\n".format(key, value) + before_args = ' -headers ' + shlex.quote(before_args) + + if isinstance(before_options, str): + before_args += ' ' + before_options + + cmd = command + '{} -i {} -f s16le -ar {} -ac {} -loglevel warning' + cmd = cmd.format(before_args, input_name, self.encoder.sampling_rate, self.encoder.channels) + + if isinstance(options, str): + cmd = cmd + ' ' + options + + cmd += ' pipe:1' + + stdin = None if not pipe else filename + args = shlex.split(cmd) + try: + p = subprocess.Popen(args, stdin=stdin, stdout=subprocess.PIPE, stderr=stderr) + return ProcessPlayer(p, self, after) + except FileNotFoundError as e: + raise ClientException('ffmpeg/avconv was not found in your PATH environment variable') from e + except subprocess.SubprocessError as e: + raise ClientException('Popen failed: {0.__name__} {1}'.format(type(e), str(e))) from e + + + @asyncio.coroutine + def create_ytdl_player(self, url, *, ytdl_options=None, **kwargs): + """|coro| + + Creates a stream player for youtube or other services that launches + in a separate thread to play the audio. + + The player uses the ``youtube_dl`` python library to get the information + required to get audio from the URL. Since this uses an external library, + you must install it yourself. You can do so by calling + ``pip install youtube_dl``. + + You must have the ffmpeg or avconv executable in your path environment + variable in order for this to work. + + The operations that can be done on the player are the same as those in + :meth:`create_stream_player`. The player has been augmented and enhanced + to have some info extracted from the URL. If youtube-dl fails to extract + the information then the attribute is ``None``. The ``yt``, ``url``, and + ``download_url`` attributes are always available. + + +---------------------+---------------------------------------------------------+ + | Operation | Description | + +=====================+=========================================================+ + | player.yt | The `YoutubeDL ` instance. | + +---------------------+---------------------------------------------------------+ + | player.url | The URL that is currently playing. | + +---------------------+---------------------------------------------------------+ + | player.download_url | The URL that is currently being downloaded to ffmpeg. | + +---------------------+---------------------------------------------------------+ + | player.title | The title of the audio stream. | + +---------------------+---------------------------------------------------------+ + | player.description | The description of the audio stream. | + +---------------------+---------------------------------------------------------+ + | player.uploader | The uploader of the audio stream. | + +---------------------+---------------------------------------------------------+ + | player.upload_date | A datetime.date object of when the stream was uploaded. | + +---------------------+---------------------------------------------------------+ + | player.duration | The duration of the audio in seconds. | + +---------------------+---------------------------------------------------------+ + | player.likes | How many likes the audio stream has. 
| + +---------------------+---------------------------------------------------------+ + | player.dislikes | How many dislikes the audio stream has. | + +---------------------+---------------------------------------------------------+ + | player.is_live | Checks if the audio stream is currently livestreaming. | + +---------------------+---------------------------------------------------------+ + | player.views | How many views the audio stream has. | + +---------------------+---------------------------------------------------------+ + + .. _ytdl: https://github.com/rg3/youtube-dl/blob/master/youtube_dl/YoutubeDL.py#L128-L278 + + Examples + ---------- + + Basic usage: :: + + voice = await client.join_voice_channel(channel) + player = await voice.create_ytdl_player('https://www.youtube.com/watch?v=d62TYemN6MQ') + player.start() + + Parameters + ----------- + url : str + The URL that ``youtube_dl`` will take and download audio to pass + to ``ffmpeg`` or ``avconv`` to convert to PCM bytes. + ytdl_options : dict + A dictionary of options to pass into the ``YoutubeDL`` instance. + See `the documentation `_ for more details. + \*\*kwargs + The rest of the keyword arguments are forwarded to + :func:`create_ffmpeg_player`. + + Raises + ------- + ClientException + Popen failure from either ``ffmpeg``/``avconv``. + + Returns + -------- + StreamPlayer + An augmented StreamPlayer that uses ffmpeg. + See :meth:`create_stream_player` for base operations. + """ + import youtube_dl + + use_avconv = kwargs.get('use_avconv', False) + opts = { + 'format': 'webm[abr>0]/bestaudio/best', + 'prefer_ffmpeg': not use_avconv + } + + if ytdl_options is not None and isinstance(ytdl_options, dict): + opts.update(ytdl_options) + + ydl = youtube_dl.YoutubeDL(opts) + func = functools.partial(ydl.extract_info, url, download=False) + info = yield from self.loop.run_in_executor(None, func) + if "entries" in info: + info = info['entries'][0] + + log.info('playing URL {}'.format(url)) + download_url = info['url'] + player = self.create_ffmpeg_player(download_url, **kwargs) + + # set the dynamic attributes from the info extraction + player.download_url = download_url + player.url = url + player.yt = ydl + player.views = info.get('view_count') + player.is_live = bool(info.get('is_live')) + player.likes = info.get('like_count') + player.dislikes = info.get('dislike_count') + player.duration = info.get('duration') + player.uploader = info.get('uploader') + + is_twitch = 'twitch' in url + if is_twitch: + # twitch has 'title' and 'description' sort of mixed up. + player.title = info.get('description') + player.description = None + else: + player.title = info.get('title') + player.description = info.get('description') + + # upload date handling + date = info.get('upload_date') + if date: + try: + date = datetime.datetime.strptime(date, '%Y%M%d').date() + except ValueError: + date = None + + player.upload_date = date + return player + + def encoder_options(self, *, sample_rate, channels=2): + """Sets the encoder options for the OpusEncoder. + + Calling this after you create a stream player + via :meth:`create_ffmpeg_player` or :meth:`create_stream_player` + has no effect. + + Parameters + ---------- + sample_rate : int + Sets the sample rate of the OpusEncoder. The unit is in Hz. + channels : int + Sets the number of channels for the OpusEncoder. + 2 for stereo, 1 for mono. + + Raises + ------- + InvalidArgument + The values provided are invalid. 
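For instance, a hedged sketch of switching the encoder to 24 kHz mono before creating a player; ``voice`` is assumed to be an existing :class:`VoiceClient`:

.. code-block:: python

    # must be called *before* creating a player, as noted above
    voice.encoder_options(sample_rate=24000, channels=1)
    player = voice.create_ffmpeg_player('low_bandwidth.mp3')
    player.start()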
+ """ + if sample_rate not in (8000, 12000, 16000, 24000, 48000): + raise InvalidArgument('Sample rate out of range. Valid: [8000, 12000, 16000, 24000, 48000]') + if channels not in (1, 2): + raise InvalidArgument('Channels must be either 1 or 2.') + + self.encoder = opus.Encoder(sample_rate, channels) + log.info('created opus encoder with {0.__dict__}'.format(self.encoder)) + + def create_stream_player(self, stream, *, after=None): + """Creates a stream player that launches in a separate thread to + play audio. + + The stream player assumes that ``stream.read`` is a valid function + that returns a *bytes-like* object. + + The finalizer, ``after`` is called after the stream has been exhausted + or an error occurred (see below). + + The following operations are valid on the ``StreamPlayer`` object: + + +---------------------+-----------------------------------------------------+ + | Operation | Description | + +=====================+=====================================================+ + | player.start() | Starts the audio stream. | + +---------------------+-----------------------------------------------------+ + | player.stop() | Stops the audio stream. | + +---------------------+-----------------------------------------------------+ + | player.is_done() | Returns a bool indicating if the stream is done. | + +---------------------+-----------------------------------------------------+ + | player.is_playing() | Returns a bool indicating if the stream is playing. | + +---------------------+-----------------------------------------------------+ + | player.pause() | Pauses the audio stream. | + +---------------------+-----------------------------------------------------+ + | player.resume() | Resumes the audio stream. | + +---------------------+-----------------------------------------------------+ + | player.volume | Allows you to set the volume of the stream. 1.0 is | + | | equivalent to 100% and 0.0 is equal to 0%. The | + | | maximum the volume can be set to is 2.0 for 200%. | + +---------------------+-----------------------------------------------------+ + | player.error | The exception that stopped the player. If no error | + | | happened, then this returns None. | + +---------------------+-----------------------------------------------------+ + + The stream must have the same sampling rate as the encoder and the same + number of channels. The defaults are 48000 Hz and 2 channels. You + could change the encoder options by using :meth:`encoder_options` + but this must be called **before** this function. + + If an error happens while the player is running, the exception is caught and + the player is then stopped. The caught exception could then be retrieved + via ``player.error``\. When the player is stopped in this matter, the + finalizer under ``after`` is called. + + Parameters + ----------- + stream + The stream object to read from. + after + The finalizer that is called after the stream is exhausted. + All exceptions it throws are silently discarded. This function + can have either no parameters or a single parameter taking in the + current player. + + Returns + -------- + StreamPlayer + A stream player with the operations noted above. + """ + return StreamPlayer(stream, self.encoder, self._connected, self.play_audio, after) + + def play_audio(self, data, *, encode=True): + """Sends an audio packet composed of the data. + + You must be connected to play audio. + + Parameters + ---------- + data : bytes + The *bytes-like object* denoting PCM or Opus voice data. 
+ encode : bool + Indicates if ``data`` should be encoded into Opus. + + Raises + ------- + ClientException + You are not connected. + OpusError + Encoding the data failed. + """ + + self.checked_add('sequence', 1, 65535) + if encode: + encoded_data = self.encoder.encode(data, self.encoder.samples_per_frame) + else: + encoded_data = data + packet = self._get_voice_packet(encoded_data) + try: + sent = self.socket.sendto(packet, (self.endpoint_ip, self.voice_port)) + except BlockingIOError: + log.warning('A packet has been dropped (seq: {0.sequence}, timestamp: {0.timestamp})'.format(self)) + + self.checked_add('timestamp', self.encoder.samples_per_frame, 4294967295) diff --git a/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/DESCRIPTION.rst b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..11103a8 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/DESCRIPTION.rst @@ -0,0 +1,178 @@ +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 `_. +This is the latest version of the protocol and is sometimes referred to as +“IDNA 2008”. + +This library also provides support for Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +This acts as a suitable replacement for the “encodings.idna” module that +comes with the Python standard library, but only supports the +old, deprecated IDNA specification (`RFC 3490 `_). + +Basic functions are simply executed: + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + # Python 2 + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +Packages +-------- + +The latest tagged release version is published in the PyPI repository: + +.. image:: https://badge.fury.io/py/idna.svg + :target: http://badge.fury.io/py/idna + + +Installation +------------ + +To install this library, you can use pip: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + +This library works with Python 2.6 or later, and Python 3.3 or later. + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to A-labels or U-labels respectively. + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. code-block:: pycon + + # Python 2 + >>> import idna.codec + >>> print u'домена.испытание'.encode('idna') + xn--80ahd1agd.xn--80akhbyknj4f + >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna') + домена.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + +.. code-block:: pycon + + # Python 2 + >>> idna.alabel(u'测试') + 'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 `_, the IDNA +specification no longer normalizes input from different potential ways a user +may input a domain name. 
This functionality, known as a “mapping”, is now +considered by the specification to be a local user-interface issue distinct +from IDNA conversion functionality. + +This library provides one such mapping, that was developed by the Unicode +Consortium. Known as `Unicode IDNA Compatibility Processing `_, +it provides for both a regular mapping for typical applications, as well as +a transitional mapping to help migrate from older IDNA 2003 applications. + +For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL +LETTER K* is not allowed (nor are capital letters in general). UTS 46 will +convert this into lower case prior to applying the IDNA conversion. + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode(u'Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from the older +2003 standard to the current standard. For example, in the original IDNA +specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two +*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this +conversion is not performed. + +.. code-block:: pycon + + # Python 2 + >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Implementors should use transitional processing with caution, only in rare +cases where conversion from legacy labels to current labels must be performed +(i.e. IDNA implementations that pre-date 2008). For typical applications +that just need to convert labels, transitional processing is unlikely to be +beneficial and could produce unexpected incompatible results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and right-to-left +characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is +an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext`` +when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO +or CONTEXTJ but the contextual requirements are not satisfied.) + +Testing +------- + +The library has a test suite based on each rule of the IDNA specification, as +well as tests that are provided as part of the Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The tests are run automatically on each commit at Travis CI: + +.. 
image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/INSTALLER b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/METADATA b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/METADATA new file mode 100644 index 0000000..89ddba7 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/METADATA @@ -0,0 +1,204 @@ +Metadata-Version: 2.0 +Name: idna +Version: 2.5 +Summary: Internationalized Domain Names in Applications (IDNA) +Home-page: https://github.com/kjd/idna +Author: Kim Davies +Author-email: kim@cynosure.com.au +License: BSD-like +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 `_. +This is the latest version of the protocol and is sometimes referred to as +“IDNA 2008”. + +This library also provides support for Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +This acts as a suitable replacement for the “encodings.idna” module that +comes with the Python standard library, but only supports the +old, deprecated IDNA specification (`RFC 3490 `_). + +Basic functions are simply executed: + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + # Python 2 + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +Packages +-------- + +The latest tagged release version is published in the PyPI repository: + +.. image:: https://badge.fury.io/py/idna.svg + :target: http://badge.fury.io/py/idna + + +Installation +------------ + +To install this library, you can use pip: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + +This library works with Python 2.6 or later, and Python 3.3 or later. + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to A-labels or U-labels respectively. + +.. 
code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. code-block:: pycon + + # Python 2 + >>> import idna.codec + >>> print u'домена.испытание'.encode('idna') + xn--80ahd1agd.xn--80akhbyknj4f + >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna') + домена.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + +.. code-block:: pycon + + # Python 2 + >>> idna.alabel(u'测试') + 'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 `_, the IDNA +specification no longer normalizes input from different potential ways a user +may input a domain name. This functionality, known as a “mapping”, is now +considered by the specification to be a local user-interface issue distinct +from IDNA conversion functionality. + +This library provides one such mapping, that was developed by the Unicode +Consortium. Known as `Unicode IDNA Compatibility Processing `_, +it provides for both a regular mapping for typical applications, as well as +a transitional mapping to help migrate from older IDNA 2003 applications. + +For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL +LETTER K* is not allowed (nor are capital letters in general). UTS 46 will +convert this into lower case prior to applying the IDNA conversion. + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode(u'Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from the older +2003 standard to the current standard. For example, in the original IDNA +specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two +*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this +conversion is not performed. + +.. code-block:: pycon + + # Python 2 + >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Implementors should use transitional processing with caution, only in rare +cases where conversion from legacy labels to current labels must be performed +(i.e. IDNA implementations that pre-date 2008). For typical applications +that just need to convert labels, transitional processing is unlikely to be +beneficial and could produce unexpected incompatible results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and right-to-left +characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is +an illegal character in an IDN label (i.e. 
INVALID); and ``idna.InvalidCodepointContext`` +when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO +or CONTEXTJ but the contextual requirements are not satisfied.) + +Testing +------- + +The library has a test suite based on each rule of the IDNA specification, as +well as tests that are provided as part of the Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The tests are run automatically on each commit at Travis CI: + +.. image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/RECORD b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/RECORD new file mode 100644 index 0000000..06c9363 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/RECORD @@ -0,0 +1,22 @@ +idna/__init__.py,sha256=K0kNy26Vm6A-1V5lST3ily6yVsNLUbiqk6AZDFm2nJI,20 +idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 +idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 +idna/core.py,sha256=GafiWdYQIK5TSjWdRzCYCho704ALtMCrV_dnXXn57U0,11390 +idna/idnadata.py,sha256=HT83KByXFJjxmeLJ95q15mfFo18srcOTRUSPPk972h8,32979 +idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 +idna/uts46data.py,sha256=r_Ppkke_Er-7ZWfyCJB0nbzYueRhmgZiswkthUC2pQ0,184931 +idna-2.5.dist-info/DESCRIPTION.rst,sha256=Ct2w7AA_3OBOlXOS6H6deReWxIJ5I9qobLJjVaLbNIw,6110 +idna-2.5.dist-info/METADATA,sha256=O4xKBcPWpRbR_oXNjhgi96TGdMdmwAWRLkiU96Sx2Ww,7148 +idna-2.5.dist-info/RECORD,, +idna-2.5.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 +idna-2.5.dist-info/metadata.json,sha256=eBIyY66_C21Gr5-WM5N-Hk_rx7UsfS4HK-_sDCYrjG4,1097 +idna-2.5.dist-info/pbr.json,sha256=GhOFmE955LEY9AT1sraeaHrg0AxaCZEJzUEZ-xlLrsk,46 +idna-2.5.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna-2.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna/__pycache__/codec.cpython-36.pyc,, +idna/__pycache__/compat.cpython-36.pyc,, +idna/__pycache__/core.cpython-36.pyc,, +idna/__pycache__/idnadata.cpython-36.pyc,, +idna/__pycache__/intranges.cpython-36.pyc,, +idna/__pycache__/uts46data.cpython-36.pyc,, +idna/__pycache__/__init__.cpython-36.pyc,, diff --git a/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/WHEEL b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/WHEEL new file mode 100644 index 0000000..0de529b --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/top_level.txt b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/top_level.txt new file mode 100644 index 0000000..c40472e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna-2.5.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/RBXLegacyDiscordBot/lib/idna/__init__.py b/RBXLegacyDiscordBot/lib/idna/__init__.py new file mode 100644 index 0000000..bb67a43 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna/__init__.py @@ -0,0 +1 @@ +from .core import * diff --git a/RBXLegacyDiscordBot/lib/idna/codec.py b/RBXLegacyDiscordBot/lib/idna/codec.py new file mode 100644 index 0000000..98c65ea --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna/codec.py @@ -0,0 +1,118 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def 
encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return u"", 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return ("", 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return (u"", 0) + + # IDNA allows decoding to operate on Unicode strings, too. + if isinstance(data, unicode): + labels = _unicode_dots_re.split(data) + else: + # Must be ASCII string + data = str(data) + unicode(data, "ascii") + labels = data.split(".") + + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = u'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = u'.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = u".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/RBXLegacyDiscordBot/lib/idna/compat.py b/RBXLegacyDiscordBot/lib/idna/compat.py new file mode 100644 index 0000000..4d47f33 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna/compat.py @@ -0,0 +1,12 @@ +from .core import * +from .codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") + diff --git a/RBXLegacyDiscordBot/lib/idna/core.py b/RBXLegacyDiscordBot/lib/idna/core.py new file mode 100644 index 0000000..b55b664 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna/core.py @@ -0,0 +1,387 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +import sys +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +if sys.version_info[0] == 3: + unicode = str + unichr = chr + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + return unicodedata.combining(unichr(cp)) + +def _is_script(cp, script): + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{0:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + break + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label 
must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == u'\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + try: + ulabel(label) + except IDNAError: + raise IDNAError('The label {0} is not a valid A-label'.format(label)) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeEncodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = unicode(label) + check_label(label) + label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if 
not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = u"" + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == "V" or + (status == "D" and not transitional) or + (status == "3" and std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == "M" or + (status == "3" and std3_rules) or + (status == "D" and transitional)): + output += replacement + elif status != "I": + raise IndexError() + return unicodedata.normalize("NFC", output) + except IndexError: + raise InvalidCodepoint( + "Codepoint {0} not allowed at position {1} in {2}".format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + while labels and not labels[0]: + del labels[0] + if not labels: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + result.append(alabel(label)) + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(u'.') + while labels and not labels[0]: + del labels[0] + if not labels: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + result.append(ulabel(label)) + if trailing_dot: + result.append(u'') + return u'.'.join(result) diff --git a/RBXLegacyDiscordBot/lib/idna/idnadata.py b/RBXLegacyDiscordBot/lib/idna/idnadata.py new file mode 100644 index 0000000..2ff30fe --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna/idnadata.py @@ -0,0 +1,1584 @@ +# This file is automatically generated by build-idnadata.py + +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0x101400001018b, + 
0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004db6, + 0x4e0000009fcd, + 0xf9000000fa6e, + 0xfa700000fada, + 0x200000002a6d7, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2f8000002fa1e, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5f0000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b002, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1b0000001b001, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x710: 82, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, 
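+ # Values in this table are ord() of the Unicode Joining_Type letter:
+ # 68 = ord('D') dual-joining, 82 = ord('R') right-joining,
+ # 76 = ord('L') left-joining, 85 = ord('U') non-joining,
+ # 67 = ord('C') join-causing; valid_contextj() in core.py compares
+ # against these via ord('T'), ord('L'), ord('D') and ord('R').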
+ 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 68, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 82, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 85, + 0x857: 85, + 0x858: 85, + 0x8a0: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 85, + 0x1886: 85, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 
68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 
0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5590000055a, + 0x56100000587, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5f0000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x8000000082e, + 0x8400000085c, + 0x8a0000008a1, + 0x8a2000008ad, + 0x8e4000008ff, + 0x90000000958, + 0x96000000964, + 0x96600000970, + 0x97100000978, + 0x97900000980, + 0x98100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5600000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0100000c04, + 0xc0500000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c34, + 0xc3500000c3a, + 0xc3d00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5a, + 0xc6000000c64, + 0xc6600000c70, + 0xc8200000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 
0xcca00000cce, + 0xcd500000cd7, + 0xcde00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf3, + 0xd0200000d04, + 0xd0500000d0d, + 0xd0e00000d11, + 0xd1200000d3b, + 0xd3d00000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5700000d58, + 0xd6000000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8200000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8700000e89, + 0xe8a00000e8b, + 0xe8d00000e8e, + 0xe9400000e98, + 0xe9900000ea0, + 0xea100000ea4, + 0xea500000ea6, + 0xea700000ea8, + 0xeaa00000eac, + 0xead00000eb3, + 0xeb400000eba, + 0xebb00000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ece, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f5, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x17000000170d, + 0x170e00001715, + 0x172000001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001878, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191d, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1b0000001b4c, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cf7, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001de7, + 0x1dfc00001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, 
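+ # Each entry packs a half-open codepoint range as (start << 32) | end,
+ # the same encoding produced by intranges._encode_range(); membership
+ # tests use intranges_contain(), which bisects over these packed values.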
+ 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c5f, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x31050000312e, + 0x31a0000031bb, + 0x31f000003200, + 0x340000004db6, + 0x4e0000009fcd, + 0xa0000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 
0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa69f0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a78f, + 0xa7910000a792, + 0xa7930000a794, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7fa0000a828, + 0xa8400000a874, + 0xa8800000a8c5, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa9000000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aa7c, + 0xaa800000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe27, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x103000001031f, + 0x1033000010341, + 0x103420001034a, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a34, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10c0000010c49, + 0x1100000011047, + 0x1106600011070, + 0x11080000110bb, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x11180000111c5, + 0x111d0000111da, + 0x11680000116b8, + 0x116c0000116ca, + 0x120000001236f, + 0x130000001342f, + 0x1680000016a39, + 0x16f0000016f45, + 0x16f5000016f7f, + 0x16f8f00016fa0, + 0x1b0000001b002, + 0x200000002a6d7, + 0x2a7000002b735, + 0x2b7400002b81e, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, 
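+ # CONTEXTO codepoints (MIDDLE DOT U+00B7, Greek lower numeral sign,
+ # Hebrew geresh/gershayim, katakana middle dot, and the two Arabic-Indic
+ # digit blocks) are only permitted when valid_contexto() in core.py
+ # accepts the surrounding characters of the label.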
+ 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/RBXLegacyDiscordBot/lib/idna/intranges.py b/RBXLegacyDiscordBot/lib/idna/intranges.py new file mode 100644 index 0000000..fa8a735 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna/intranges.py @@ -0,0 +1,53 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect + +def intranges_from_list(list_): + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. + """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start, end): + return (start << 32) | end + +def _decode_range(r): + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_, ranges): + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/RBXLegacyDiscordBot/lib/idna/uts46data.py b/RBXLegacyDiscordBot/lib/idna/uts46data.py new file mode 100644 index 0000000..48da840 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/idna/uts46data.py @@ -0,0 +1,7633 @@ +# This file is automatically generated by tools/build-uts46data.py +# vim: set fileencoding=utf-8 : + +"""IDNA Mapping Table from UTS46.""" + + +def _seg_0(): + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', u'a'), + (0x42, 'M', u'b'), + (0x43, 'M', u'c'), + (0x44, 'M', u'd'), + (0x45, 'M', u'e'), + (0x46, 'M', u'f'), + (0x47, 'M', u'g'), + (0x48, 'M', u'h'), + (0x49, 'M', u'i'), + (0x4A, 'M', u'j'), + (0x4B, 'M', u'k'), + (0x4C, 'M', 
u'l'), + (0x4D, 'M', u'm'), + (0x4E, 'M', u'n'), + (0x4F, 'M', u'o'), + (0x50, 'M', u'p'), + (0x51, 'M', u'q'), + (0x52, 'M', u'r'), + (0x53, 'M', u's'), + (0x54, 'M', u't'), + (0x55, 'M', u'u'), + (0x56, 'M', u'v'), + (0x57, 'M', u'w'), + (0x58, 'M', u'x'), + (0x59, 'M', u'y'), + (0x5A, 'M', u'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1(): + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', u' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', u' ̈'), + (0xA9, 'V'), + (0xAA, 'M', u'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', u' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', u'2'), + (0xB3, 'M', u'3'), + (0xB4, '3', u' ́'), + (0xB5, 'M', u'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', u' ̧'), + (0xB9, 'M', u'1'), + (0xBA, 'M', u'o'), + (0xBB, 'V'), + (0xBC, 'M', u'1⁄4'), + (0xBD, 'M', u'1⁄2'), + (0xBE, 'M', u'3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', u'à'), + (0xC1, 'M', u'á'), + (0xC2, 'M', u'â'), + (0xC3, 'M', u'ã'), + (0xC4, 'M', u'ä'), + (0xC5, 'M', u'å'), + (0xC6, 'M', u'æ'), + (0xC7, 'M', u'ç'), + ] + +def _seg_2(): + return [ + (0xC8, 'M', u'è'), + (0xC9, 'M', u'é'), + (0xCA, 'M', u'ê'), + (0xCB, 'M', u'ë'), + (0xCC, 'M', u'ì'), + (0xCD, 'M', u'í'), + (0xCE, 'M', u'î'), + (0xCF, 'M', u'ï'), + (0xD0, 'M', u'ð'), + (0xD1, 'M', u'ñ'), + (0xD2, 'M', u'ò'), + (0xD3, 'M', u'ó'), + (0xD4, 'M', u'ô'), + (0xD5, 'M', u'õ'), + (0xD6, 'M', u'ö'), + (0xD7, 'V'), + (0xD8, 'M', u'ø'), + (0xD9, 'M', u'ù'), + (0xDA, 'M', u'ú'), + (0xDB, 'M', u'û'), + (0xDC, 'M', u'ü'), + (0xDD, 'M', u'ý'), + (0xDE, 'M', u'þ'), + (0xDF, 'D', u'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', u'ā'), + (0x101, 'V'), + (0x102, 'M', u'ă'), + (0x103, 'V'), + (0x104, 'M', u'ą'), + (0x105, 'V'), + (0x106, 'M', u'ć'), + (0x107, 'V'), + (0x108, 'M', u'ĉ'), + (0x109, 'V'), + (0x10A, 'M', u'ċ'), + (0x10B, 'V'), + (0x10C, 'M', u'č'), + (0x10D, 'V'), + (0x10E, 'M', u'ď'), + (0x10F, 'V'), + (0x110, 'M', u'đ'), + (0x111, 'V'), + (0x112, 'M', u'ē'), + (0x113, 'V'), + (0x114, 'M', u'ĕ'), + (0x115, 'V'), + (0x116, 'M', 
u'ė'), + (0x117, 'V'), + (0x118, 'M', u'ę'), + (0x119, 'V'), + (0x11A, 'M', u'ě'), + (0x11B, 'V'), + (0x11C, 'M', u'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', u'ğ'), + (0x11F, 'V'), + (0x120, 'M', u'ġ'), + (0x121, 'V'), + (0x122, 'M', u'ģ'), + (0x123, 'V'), + (0x124, 'M', u'ĥ'), + (0x125, 'V'), + (0x126, 'M', u'ħ'), + (0x127, 'V'), + (0x128, 'M', u'ĩ'), + (0x129, 'V'), + (0x12A, 'M', u'ī'), + (0x12B, 'V'), + ] + +def _seg_3(): + return [ + (0x12C, 'M', u'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', u'į'), + (0x12F, 'V'), + (0x130, 'M', u'i̇'), + (0x131, 'V'), + (0x132, 'M', u'ij'), + (0x134, 'M', u'ĵ'), + (0x135, 'V'), + (0x136, 'M', u'ķ'), + (0x137, 'V'), + (0x139, 'M', u'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', u'ļ'), + (0x13C, 'V'), + (0x13D, 'M', u'ľ'), + (0x13E, 'V'), + (0x13F, 'M', u'l·'), + (0x141, 'M', u'ł'), + (0x142, 'V'), + (0x143, 'M', u'ń'), + (0x144, 'V'), + (0x145, 'M', u'ņ'), + (0x146, 'V'), + (0x147, 'M', u'ň'), + (0x148, 'V'), + (0x149, 'M', u'ʼn'), + (0x14A, 'M', u'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', u'ō'), + (0x14D, 'V'), + (0x14E, 'M', u'ŏ'), + (0x14F, 'V'), + (0x150, 'M', u'ő'), + (0x151, 'V'), + (0x152, 'M', u'œ'), + (0x153, 'V'), + (0x154, 'M', u'ŕ'), + (0x155, 'V'), + (0x156, 'M', u'ŗ'), + (0x157, 'V'), + (0x158, 'M', u'ř'), + (0x159, 'V'), + (0x15A, 'M', u'ś'), + (0x15B, 'V'), + (0x15C, 'M', u'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', u'ş'), + (0x15F, 'V'), + (0x160, 'M', u'š'), + (0x161, 'V'), + (0x162, 'M', u'ţ'), + (0x163, 'V'), + (0x164, 'M', u'ť'), + (0x165, 'V'), + (0x166, 'M', u'ŧ'), + (0x167, 'V'), + (0x168, 'M', u'ũ'), + (0x169, 'V'), + (0x16A, 'M', u'ū'), + (0x16B, 'V'), + (0x16C, 'M', u'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', u'ů'), + (0x16F, 'V'), + (0x170, 'M', u'ű'), + (0x171, 'V'), + (0x172, 'M', u'ų'), + (0x173, 'V'), + (0x174, 'M', u'ŵ'), + (0x175, 'V'), + (0x176, 'M', u'ŷ'), + (0x177, 'V'), + (0x178, 'M', u'ÿ'), + (0x179, 'M', u'ź'), + (0x17A, 'V'), + (0x17B, 'M', u'ż'), + (0x17C, 'V'), + (0x17D, 'M', u'ž'), + (0x17E, 'V'), + (0x17F, 'M', u's'), + (0x180, 'V'), + (0x181, 'M', u'ɓ'), + (0x182, 'M', u'ƃ'), + (0x183, 'V'), + (0x184, 'M', u'ƅ'), + (0x185, 'V'), + (0x186, 'M', u'ɔ'), + (0x187, 'M', u'ƈ'), + (0x188, 'V'), + (0x189, 'M', u'ɖ'), + (0x18A, 'M', u'ɗ'), + (0x18B, 'M', u'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', u'ǝ'), + (0x18F, 'M', u'ə'), + (0x190, 'M', u'ɛ'), + (0x191, 'M', u'ƒ'), + (0x192, 'V'), + (0x193, 'M', u'ɠ'), + ] + +def _seg_4(): + return [ + (0x194, 'M', u'ɣ'), + (0x195, 'V'), + (0x196, 'M', u'ɩ'), + (0x197, 'M', u'ɨ'), + (0x198, 'M', u'ƙ'), + (0x199, 'V'), + (0x19C, 'M', u'ɯ'), + (0x19D, 'M', u'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', u'ɵ'), + (0x1A0, 'M', u'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', u'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', u'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', u'ʀ'), + (0x1A7, 'M', u'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', u'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', u'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', u'ʈ'), + (0x1AF, 'M', u'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', u'ʊ'), + (0x1B2, 'M', u'ʋ'), + (0x1B3, 'M', u'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', u'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', u'ʒ'), + (0x1B8, 'M', u'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', u'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', u'dž'), + (0x1C7, 'M', u'lj'), + (0x1CA, 'M', u'nj'), + (0x1CD, 'M', u'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', u'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', u'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', u'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', u'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', u'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', u'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', u'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', u'ǟ'), + (0x1DF, 'V'), + 
(0x1E0, 'M', u'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', u'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', u'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', u'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', u'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', u'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', u'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', u'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', u'dz'), + (0x1F4, 'M', u'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', u'ƕ'), + (0x1F7, 'M', u'ƿ'), + (0x1F8, 'M', u'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', u'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', u'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', u'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', u'ȁ'), + (0x201, 'V'), + (0x202, 'M', u'ȃ'), + (0x203, 'V'), + (0x204, 'M', u'ȅ'), + (0x205, 'V'), + (0x206, 'M', u'ȇ'), + (0x207, 'V'), + (0x208, 'M', u'ȉ'), + (0x209, 'V'), + (0x20A, 'M', u'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', u'ȍ'), + ] + +def _seg_5(): + return [ + (0x20D, 'V'), + (0x20E, 'M', u'ȏ'), + (0x20F, 'V'), + (0x210, 'M', u'ȑ'), + (0x211, 'V'), + (0x212, 'M', u'ȓ'), + (0x213, 'V'), + (0x214, 'M', u'ȕ'), + (0x215, 'V'), + (0x216, 'M', u'ȗ'), + (0x217, 'V'), + (0x218, 'M', u'ș'), + (0x219, 'V'), + (0x21A, 'M', u'ț'), + (0x21B, 'V'), + (0x21C, 'M', u'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', u'ȟ'), + (0x21F, 'V'), + (0x220, 'M', u'ƞ'), + (0x221, 'V'), + (0x222, 'M', u'ȣ'), + (0x223, 'V'), + (0x224, 'M', u'ȥ'), + (0x225, 'V'), + (0x226, 'M', u'ȧ'), + (0x227, 'V'), + (0x228, 'M', u'ȩ'), + (0x229, 'V'), + (0x22A, 'M', u'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', u'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', u'ȯ'), + (0x22F, 'V'), + (0x230, 'M', u'ȱ'), + (0x231, 'V'), + (0x232, 'M', u'ȳ'), + (0x233, 'V'), + (0x23A, 'M', u'ⱥ'), + (0x23B, 'M', u'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', u'ƚ'), + (0x23E, 'M', u'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', u'ɂ'), + (0x242, 'V'), + (0x243, 'M', u'ƀ'), + (0x244, 'M', u'ʉ'), + (0x245, 'M', u'ʌ'), + (0x246, 'M', u'ɇ'), + (0x247, 'V'), + (0x248, 'M', u'ɉ'), + (0x249, 'V'), + (0x24A, 'M', u'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', u'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', u'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', u'h'), + (0x2B1, 'M', u'ɦ'), + (0x2B2, 'M', u'j'), + (0x2B3, 'M', u'r'), + (0x2B4, 'M', u'ɹ'), + (0x2B5, 'M', u'ɻ'), + (0x2B6, 'M', u'ʁ'), + (0x2B7, 'M', u'w'), + (0x2B8, 'M', u'y'), + (0x2B9, 'V'), + (0x2D8, '3', u' ̆'), + (0x2D9, '3', u' ̇'), + (0x2DA, '3', u' ̊'), + (0x2DB, '3', u' ̨'), + (0x2DC, '3', u' ̃'), + (0x2DD, '3', u' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', u'ɣ'), + (0x2E1, 'M', u'l'), + (0x2E2, 'M', u's'), + (0x2E3, 'M', u'x'), + (0x2E4, 'M', u'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', u'̀'), + (0x341, 'M', u'́'), + (0x342, 'V'), + (0x343, 'M', u'̓'), + (0x344, 'M', u'̈́'), + (0x345, 'M', u'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', u'ͱ'), + (0x371, 'V'), + (0x372, 'M', u'ͳ'), + (0x373, 'V'), + (0x374, 'M', u'ʹ'), + (0x375, 'V'), + (0x376, 'M', u'ͷ'), + (0x377, 'V'), + ] + +def _seg_6(): + return [ + (0x378, 'X'), + (0x37A, '3', u' ι'), + (0x37B, 'V'), + (0x37E, '3', u';'), + (0x37F, 'X'), + (0x384, '3', u' ́'), + (0x385, '3', u' ̈́'), + (0x386, 'M', u'ά'), + (0x387, 'M', u'·'), + (0x388, 'M', u'έ'), + (0x389, 'M', u'ή'), + (0x38A, 'M', u'ί'), + (0x38B, 'X'), + (0x38C, 'M', u'ό'), + (0x38D, 'X'), + (0x38E, 'M', u'ύ'), + (0x38F, 'M', u'ώ'), + (0x390, 'V'), + (0x391, 'M', u'α'), + (0x392, 'M', u'β'), + (0x393, 'M', u'γ'), + (0x394, 'M', u'δ'), + (0x395, 'M', u'ε'), + (0x396, 'M', u'ζ'), + (0x397, 'M', u'η'), + (0x398, 'M', u'θ'), + (0x399, 'M', u'ι'), + (0x39A, 'M', u'κ'), + (0x39B, 'M', u'λ'), + (0x39C, 'M', u'μ'), + (0x39D, 'M', u'ν'), + (0x39E, 'M', u'ξ'), + (0x39F, 'M', u'ο'), + (0x3A0, 'M', 
u'π'), + (0x3A1, 'M', u'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', u'σ'), + (0x3A4, 'M', u'τ'), + (0x3A5, 'M', u'υ'), + (0x3A6, 'M', u'φ'), + (0x3A7, 'M', u'χ'), + (0x3A8, 'M', u'ψ'), + (0x3A9, 'M', u'ω'), + (0x3AA, 'M', u'ϊ'), + (0x3AB, 'M', u'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', u'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', u'ϗ'), + (0x3D0, 'M', u'β'), + (0x3D1, 'M', u'θ'), + (0x3D2, 'M', u'υ'), + (0x3D3, 'M', u'ύ'), + (0x3D4, 'M', u'ϋ'), + (0x3D5, 'M', u'φ'), + (0x3D6, 'M', u'π'), + (0x3D7, 'V'), + (0x3D8, 'M', u'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', u'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', u'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', u'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', u'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', u'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', u'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', u'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', u'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', u'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', u'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', u'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', u'κ'), + (0x3F1, 'M', u'ρ'), + (0x3F2, 'M', u'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', u'θ'), + (0x3F5, 'M', u'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', u'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', u'σ'), + (0x3FA, 'M', u'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', u'ͻ'), + (0x3FE, 'M', u'ͼ'), + (0x3FF, 'M', u'ͽ'), + (0x400, 'M', u'ѐ'), + (0x401, 'M', u'ё'), + (0x402, 'M', u'ђ'), + (0x403, 'M', u'ѓ'), + ] + +def _seg_7(): + return [ + (0x404, 'M', u'є'), + (0x405, 'M', u'ѕ'), + (0x406, 'M', u'і'), + (0x407, 'M', u'ї'), + (0x408, 'M', u'ј'), + (0x409, 'M', u'љ'), + (0x40A, 'M', u'њ'), + (0x40B, 'M', u'ћ'), + (0x40C, 'M', u'ќ'), + (0x40D, 'M', u'ѝ'), + (0x40E, 'M', u'ў'), + (0x40F, 'M', u'џ'), + (0x410, 'M', u'а'), + (0x411, 'M', u'б'), + (0x412, 'M', u'в'), + (0x413, 'M', u'г'), + (0x414, 'M', u'д'), + (0x415, 'M', u'е'), + (0x416, 'M', u'ж'), + (0x417, 'M', u'з'), + (0x418, 'M', u'и'), + (0x419, 'M', u'й'), + (0x41A, 'M', u'к'), + (0x41B, 'M', u'л'), + (0x41C, 'M', u'м'), + (0x41D, 'M', u'н'), + (0x41E, 'M', u'о'), + (0x41F, 'M', u'п'), + (0x420, 'M', u'р'), + (0x421, 'M', u'с'), + (0x422, 'M', u'т'), + (0x423, 'M', u'у'), + (0x424, 'M', u'ф'), + (0x425, 'M', u'х'), + (0x426, 'M', u'ц'), + (0x427, 'M', u'ч'), + (0x428, 'M', u'ш'), + (0x429, 'M', u'щ'), + (0x42A, 'M', u'ъ'), + (0x42B, 'M', u'ы'), + (0x42C, 'M', u'ь'), + (0x42D, 'M', u'э'), + (0x42E, 'M', u'ю'), + (0x42F, 'M', u'я'), + (0x430, 'V'), + (0x460, 'M', u'ѡ'), + (0x461, 'V'), + (0x462, 'M', u'ѣ'), + (0x463, 'V'), + (0x464, 'M', u'ѥ'), + (0x465, 'V'), + (0x466, 'M', u'ѧ'), + (0x467, 'V'), + (0x468, 'M', u'ѩ'), + (0x469, 'V'), + (0x46A, 'M', u'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', u'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', u'ѯ'), + (0x46F, 'V'), + (0x470, 'M', u'ѱ'), + (0x471, 'V'), + (0x472, 'M', u'ѳ'), + (0x473, 'V'), + (0x474, 'M', u'ѵ'), + (0x475, 'V'), + (0x476, 'M', u'ѷ'), + (0x477, 'V'), + (0x478, 'M', u'ѹ'), + (0x479, 'V'), + (0x47A, 'M', u'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', u'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', u'ѿ'), + (0x47F, 'V'), + (0x480, 'M', u'ҁ'), + (0x481, 'V'), + (0x48A, 'M', u'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', u'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', u'ҏ'), + (0x48F, 'V'), + (0x490, 'M', u'ґ'), + (0x491, 'V'), + (0x492, 'M', u'ғ'), + (0x493, 'V'), + (0x494, 'M', u'ҕ'), + (0x495, 'V'), + (0x496, 'M', u'җ'), + (0x497, 'V'), + (0x498, 'M', u'ҙ'), + (0x499, 'V'), + (0x49A, 'M', u'қ'), + (0x49B, 'V'), + (0x49C, 'M', u'ҝ'), + (0x49D, 'V'), + (0x49E, 'M', u'ҟ'), + ] + +def _seg_8(): + return [ + (0x49F, 'V'), + (0x4A0, 'M', u'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', u'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', u'ҥ'), + 
(0x4A5, 'V'), + (0x4A6, 'M', u'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', u'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', u'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', u'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', u'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', u'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', u'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', u'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', u'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', u'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', u'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', u'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', u'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', u'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', u'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', u'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', u'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', u'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', u'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', u'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', u'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', u'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', u'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', u'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', u'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', u'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', u'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', u'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', u'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', u'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', u'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', u'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', u'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', u'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', u'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', u'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', u'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', u'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', u'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', u'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', u'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', u'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', u'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', u'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', u'ԁ'), + (0x501, 'V'), + (0x502, 'M', u'ԃ'), + (0x503, 'V'), + ] + +def _seg_9(): + return [ + (0x504, 'M', u'ԅ'), + (0x505, 'V'), + (0x506, 'M', u'ԇ'), + (0x507, 'V'), + (0x508, 'M', u'ԉ'), + (0x509, 'V'), + (0x50A, 'M', u'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', u'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', u'ԏ'), + (0x50F, 'V'), + (0x510, 'M', u'ԑ'), + (0x511, 'V'), + (0x512, 'M', u'ԓ'), + (0x513, 'V'), + (0x514, 'M', u'ԕ'), + (0x515, 'V'), + (0x516, 'M', u'ԗ'), + (0x517, 'V'), + (0x518, 'M', u'ԙ'), + (0x519, 'V'), + (0x51A, 'M', u'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', u'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', u'ԟ'), + (0x51F, 'V'), + (0x520, 'M', u'ԡ'), + (0x521, 'V'), + (0x522, 'M', u'ԣ'), + (0x523, 'V'), + (0x524, 'M', u'ԥ'), + (0x525, 'V'), + (0x526, 'M', u'ԧ'), + (0x527, 'V'), + (0x528, 'X'), + (0x531, 'M', u'ա'), + (0x532, 'M', u'բ'), + (0x533, 'M', u'գ'), + (0x534, 'M', u'դ'), + (0x535, 'M', u'ե'), + (0x536, 'M', u'զ'), + (0x537, 'M', u'է'), + (0x538, 'M', u'ը'), + (0x539, 'M', u'թ'), + (0x53A, 'M', u'ժ'), + (0x53B, 'M', u'ի'), + (0x53C, 'M', u'լ'), + (0x53D, 'M', u'խ'), + (0x53E, 'M', u'ծ'), + (0x53F, 'M', u'կ'), + (0x540, 'M', u'հ'), + (0x541, 'M', u'ձ'), + (0x542, 'M', u'ղ'), + (0x543, 'M', u'ճ'), + (0x544, 'M', u'մ'), + (0x545, 'M', u'յ'), + (0x546, 'M', u'ն'), + (0x547, 'M', u'շ'), + (0x548, 'M', u'ո'), + (0x549, 'M', u'չ'), + (0x54A, 'M', u'պ'), + (0x54B, 'M', u'ջ'), + (0x54C, 'M', u'ռ'), + (0x54D, 'M', u'ս'), + (0x54E, 'M', u'վ'), + (0x54F, 'M', u'տ'), + (0x550, 'M', u'ր'), + (0x551, 'M', u'ց'), + (0x552, 'M', u'ւ'), + (0x553, 'M', u'փ'), + (0x554, 'M', u'ք'), + (0x555, 'M', u'օ'), + (0x556, 'M', u'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x560, 'X'), + (0x561, 'V'), + (0x587, 'M', u'եւ'), + (0x588, 'X'), + (0x589, 'V'), + (0x58B, 'X'), + (0x58F, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 
'V'), + (0x5EB, 'X'), + (0x5F0, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + (0x675, 'M', u'اٴ'), + (0x676, 'M', u'وٴ'), + (0x677, 'M', u'ۇٴ'), + (0x678, 'M', u'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + ] + +def _seg_10(): + return [ + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x800, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x8A0, 'V'), + (0x8A1, 'X'), + (0x8A2, 'V'), + (0x8AD, 'X'), + (0x8E4, 'V'), + (0x8FF, 'X'), + (0x900, 'V'), + (0x958, 'M', u'क़'), + (0x959, 'M', u'ख़'), + (0x95A, 'M', u'ग़'), + (0x95B, 'M', u'ज़'), + (0x95C, 'M', u'ड़'), + (0x95D, 'M', u'ढ़'), + (0x95E, 'M', u'फ़'), + (0x95F, 'M', u'य़'), + (0x960, 'V'), + (0x978, 'X'), + (0x979, 'V'), + (0x980, 'X'), + (0x981, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', u'ড়'), + (0x9DD, 'M', u'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', u'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FC, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', u'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', u'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', u'ਫ਼'), + (0xA5F, 'X'), + ] + +def _seg_11(): + return [ + (0xA66, 'V'), + (0xA76, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB56, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', u'ଡ଼'), + (0xB5D, 'M', u'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC01, 'V'), + (0xC04, 'X'), + (0xC05, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 
'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC34, 'X'), + (0xC35, 'V'), + ] + +def _seg_12(): + return [ + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5A, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC78, 'V'), + (0xC80, 'X'), + (0xC82, 'V'), + (0xC84, 'X'), + (0xC85, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD02, 'V'), + (0xD04, 'X'), + (0xD05, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD3B, 'X'), + (0xD3D, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD4F, 'X'), + (0xD57, 'V'), + (0xD58, 'X'), + (0xD60, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD76, 'X'), + (0xD79, 'V'), + (0xD80, 'X'), + (0xD82, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', u'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE87, 'V'), + ] + +def _seg_13(): + return [ + (0xE89, 'X'), + (0xE8A, 'V'), + (0xE8B, 'X'), + (0xE8D, 'V'), + (0xE8E, 'X'), + (0xE94, 'V'), + (0xE98, 'X'), + (0xE99, 'V'), + (0xEA0, 'X'), + (0xEA1, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEA8, 'X'), + (0xEAA, 'V'), + (0xEAC, 'X'), + (0xEAD, 'V'), + (0xEB3, 'M', u'ໍາ'), + (0xEB4, 'V'), + (0xEBA, 'X'), + (0xEBB, 'V'), + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', u'ຫນ'), + (0xEDD, 'M', u'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', u'་'), + (0xF0D, 'V'), + (0xF43, 'M', u'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', u'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', u'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', u'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', u'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', u'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', u'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', u'ཱུ'), + (0xF76, 'M', u'ྲྀ'), + (0xF77, 'M', u'ྲཱྀ'), + (0xF78, 'M', u'ླྀ'), + (0xF79, 'M', u'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', u'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', u'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', u'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', u'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', u'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', u'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', u'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', u'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', u'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', u'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + ] + +def 
_seg_14(): + return [ + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F5, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F1, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1878, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191D, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + ] + +def _seg_15(): + return [ + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'X'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CF7, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', u'a'), + (0x1D2D, 'M', u'æ'), + (0x1D2E, 'M', u'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', u'd'), + (0x1D31, 'M', u'e'), + (0x1D32, 'M', u'ǝ'), + (0x1D33, 'M', u'g'), + (0x1D34, 'M', u'h'), + (0x1D35, 'M', u'i'), + (0x1D36, 'M', u'j'), + (0x1D37, 'M', u'k'), + (0x1D38, 'M', u'l'), + (0x1D39, 'M', u'm'), + (0x1D3A, 'M', u'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', u'o'), + (0x1D3D, 'M', u'ȣ'), + (0x1D3E, 'M', u'p'), + (0x1D3F, 'M', u'r'), + (0x1D40, 'M', u't'), + (0x1D41, 'M', u'u'), + (0x1D42, 'M', u'w'), + (0x1D43, 'M', u'a'), + (0x1D44, 'M', u'ɐ'), + (0x1D45, 'M', u'ɑ'), + (0x1D46, 'M', u'ᴂ'), + (0x1D47, 'M', u'b'), + (0x1D48, 'M', u'd'), + (0x1D49, 'M', u'e'), + (0x1D4A, 'M', u'ə'), + (0x1D4B, 'M', u'ɛ'), + (0x1D4C, 'M', u'ɜ'), + (0x1D4D, 'M', u'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', u'k'), + (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'ŋ'), + (0x1D52, 'M', u'o'), + (0x1D53, 'M', u'ɔ'), + (0x1D54, 'M', u'ᴖ'), + (0x1D55, 'M', u'ᴗ'), + (0x1D56, 'M', u'p'), + (0x1D57, 'M', u't'), + (0x1D58, 'M', u'u'), + (0x1D59, 'M', u'ᴝ'), + (0x1D5A, 'M', u'ɯ'), + (0x1D5B, 'M', u'v'), + (0x1D5C, 'M', u'ᴥ'), + (0x1D5D, 'M', u'β'), + (0x1D5E, 'M', u'γ'), + (0x1D5F, 'M', u'δ'), + (0x1D60, 'M', u'φ'), + (0x1D61, 'M', u'χ'), + (0x1D62, 'M', u'i'), + (0x1D63, 'M', u'r'), + (0x1D64, 'M', u'u'), + (0x1D65, 'M', u'v'), + (0x1D66, 'M', u'β'), + (0x1D67, 'M', u'γ'), + (0x1D68, 'M', u'ρ'), + (0x1D69, 'M', u'φ'), + (0x1D6A, 'M', u'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', u'н'), + 
(0x1D79, 'V'), + (0x1D9B, 'M', u'ɒ'), + (0x1D9C, 'M', u'c'), + (0x1D9D, 'M', u'ɕ'), + (0x1D9E, 'M', u'ð'), + (0x1D9F, 'M', u'ɜ'), + (0x1DA0, 'M', u'f'), + (0x1DA1, 'M', u'ɟ'), + (0x1DA2, 'M', u'ɡ'), + (0x1DA3, 'M', u'ɥ'), + (0x1DA4, 'M', u'ɨ'), + (0x1DA5, 'M', u'ɩ'), + (0x1DA6, 'M', u'ɪ'), + (0x1DA7, 'M', u'ᵻ'), + (0x1DA8, 'M', u'ʝ'), + (0x1DA9, 'M', u'ɭ'), + ] + +def _seg_16(): + return [ + (0x1DAA, 'M', u'ᶅ'), + (0x1DAB, 'M', u'ʟ'), + (0x1DAC, 'M', u'ɱ'), + (0x1DAD, 'M', u'ɰ'), + (0x1DAE, 'M', u'ɲ'), + (0x1DAF, 'M', u'ɳ'), + (0x1DB0, 'M', u'ɴ'), + (0x1DB1, 'M', u'ɵ'), + (0x1DB2, 'M', u'ɸ'), + (0x1DB3, 'M', u'ʂ'), + (0x1DB4, 'M', u'ʃ'), + (0x1DB5, 'M', u'ƫ'), + (0x1DB6, 'M', u'ʉ'), + (0x1DB7, 'M', u'ʊ'), + (0x1DB8, 'M', u'ᴜ'), + (0x1DB9, 'M', u'ʋ'), + (0x1DBA, 'M', u'ʌ'), + (0x1DBB, 'M', u'z'), + (0x1DBC, 'M', u'ʐ'), + (0x1DBD, 'M', u'ʑ'), + (0x1DBE, 'M', u'ʒ'), + (0x1DBF, 'M', u'θ'), + (0x1DC0, 'V'), + (0x1DE7, 'X'), + (0x1DFC, 'V'), + (0x1E00, 'M', u'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', u'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', u'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', u'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', u'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', u'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', u'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', u'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', u'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', u'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', u'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', u'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', u'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', u'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', u'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', u'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', u'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', u'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', u'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', u'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', u'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', u'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', u'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', u'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', u'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', u'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', u'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', u'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', u'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', u'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', u'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', u'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', u'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', u'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', u'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', u'ṋ'), + ] + +def _seg_17(): + return [ + (0x1E4B, 'V'), + (0x1E4C, 'M', u'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', u'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', u'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', u'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', u'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', u'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', u'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', u'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', u'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', u'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', u'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', u'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', u'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', u'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', u'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', u'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', u'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', u'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', u'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', u'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', u'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', u'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', u'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', u'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', u'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', u'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 
'M', u'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', u'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', u'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', u'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', u'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', u'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', u'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', u'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', u'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', u'aʾ'), + (0x1E9B, 'M', u'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', u'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', u'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', u'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', u'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', u'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', u'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', u'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', u'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', u'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', u'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', u'ẳ'), + (0x1EB3, 'V'), + ] + +def _seg_18(): + return [ + (0x1EB4, 'M', u'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', u'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', u'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', u'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', u'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', u'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', u'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', u'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', u'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', u'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', u'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', u'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', u'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', u'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', u'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', u'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', u'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', u'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', u'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', u'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', u'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', u'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', u'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', u'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', u'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', u'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', u'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', u'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', u'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', u'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', u'ự'), + (0x1EF1, 'V'), + (0x1EF2, 'M', u'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', u'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', u'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', u'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', u'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', u'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', u'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', u'ἀ'), + (0x1F09, 'M', u'ἁ'), + (0x1F0A, 'M', u'ἂ'), + (0x1F0B, 'M', u'ἃ'), + (0x1F0C, 'M', u'ἄ'), + (0x1F0D, 'M', u'ἅ'), + (0x1F0E, 'M', u'ἆ'), + (0x1F0F, 'M', u'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', u'ἐ'), + (0x1F19, 'M', u'ἑ'), + (0x1F1A, 'M', u'ἒ'), + (0x1F1B, 'M', u'ἓ'), + (0x1F1C, 'M', u'ἔ'), + (0x1F1D, 'M', u'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', u'ἠ'), + (0x1F29, 'M', u'ἡ'), + (0x1F2A, 'M', u'ἢ'), + (0x1F2B, 'M', u'ἣ'), + (0x1F2C, 'M', u'ἤ'), + (0x1F2D, 'M', u'ἥ'), + ] + +def _seg_19(): + return [ + (0x1F2E, 'M', u'ἦ'), + (0x1F2F, 'M', u'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', u'ἰ'), + (0x1F39, 'M', u'ἱ'), + (0x1F3A, 'M', u'ἲ'), + (0x1F3B, 'M', u'ἳ'), + (0x1F3C, 'M', u'ἴ'), + (0x1F3D, 'M', u'ἵ'), + (0x1F3E, 'M', u'ἶ'), + (0x1F3F, 'M', u'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', u'ὀ'), + (0x1F49, 'M', u'ὁ'), + (0x1F4A, 'M', u'ὂ'), + (0x1F4B, 'M', u'ὃ'), + (0x1F4C, 'M', u'ὄ'), + (0x1F4D, 'M', u'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', u'ὑ'), + (0x1F5A, 
'X'), + (0x1F5B, 'M', u'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', u'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', u'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', u'ὠ'), + (0x1F69, 'M', u'ὡ'), + (0x1F6A, 'M', u'ὢ'), + (0x1F6B, 'M', u'ὣ'), + (0x1F6C, 'M', u'ὤ'), + (0x1F6D, 'M', u'ὥ'), + (0x1F6E, 'M', u'ὦ'), + (0x1F6F, 'M', u'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', u'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', u'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', u'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', u'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', u'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', u'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', u'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', u'ἀι'), + (0x1F81, 'M', u'ἁι'), + (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), + (0x1F85, 'M', u'ἅι'), + (0x1F86, 'M', u'ἆι'), + (0x1F87, 'M', u'ἇι'), + (0x1F88, 'M', u'ἀι'), + (0x1F89, 'M', u'ἁι'), + (0x1F8A, 'M', u'ἂι'), + (0x1F8B, 'M', u'ἃι'), + (0x1F8C, 'M', u'ἄι'), + (0x1F8D, 'M', u'ἅι'), + (0x1F8E, 'M', u'ἆι'), + (0x1F8F, 'M', u'ἇι'), + (0x1F90, 'M', u'ἠι'), + (0x1F91, 'M', u'ἡι'), + (0x1F92, 'M', u'ἢι'), + (0x1F93, 'M', u'ἣι'), + (0x1F94, 'M', u'ἤι'), + (0x1F95, 'M', u'ἥι'), + (0x1F96, 'M', u'ἦι'), + (0x1F97, 'M', u'ἧι'), + (0x1F98, 'M', u'ἠι'), + (0x1F99, 'M', u'ἡι'), + (0x1F9A, 'M', u'ἢι'), + (0x1F9B, 'M', u'ἣι'), + (0x1F9C, 'M', u'ἤι'), + (0x1F9D, 'M', u'ἥι'), + (0x1F9E, 'M', u'ἦι'), + (0x1F9F, 'M', u'ἧι'), + (0x1FA0, 'M', u'ὠι'), + (0x1FA1, 'M', u'ὡι'), + (0x1FA2, 'M', u'ὢι'), + (0x1FA3, 'M', u'ὣι'), + (0x1FA4, 'M', u'ὤι'), + (0x1FA5, 'M', u'ὥι'), + (0x1FA6, 'M', u'ὦι'), + (0x1FA7, 'M', u'ὧι'), + (0x1FA8, 'M', u'ὠι'), + (0x1FA9, 'M', u'ὡι'), + (0x1FAA, 'M', u'ὢι'), + (0x1FAB, 'M', u'ὣι'), + (0x1FAC, 'M', u'ὤι'), + (0x1FAD, 'M', u'ὥι'), + (0x1FAE, 'M', u'ὦι'), + ] + +def _seg_20(): + return [ + (0x1FAF, 'M', u'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', u'ὰι'), + (0x1FB3, 'M', u'αι'), + (0x1FB4, 'M', u'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', u'ᾶι'), + (0x1FB8, 'M', u'ᾰ'), + (0x1FB9, 'M', u'ᾱ'), + (0x1FBA, 'M', u'ὰ'), + (0x1FBB, 'M', u'ά'), + (0x1FBC, 'M', u'αι'), + (0x1FBD, '3', u' ̓'), + (0x1FBE, 'M', u'ι'), + (0x1FBF, '3', u' ̓'), + (0x1FC0, '3', u' ͂'), + (0x1FC1, '3', u' ̈͂'), + (0x1FC2, 'M', u'ὴι'), + (0x1FC3, 'M', u'ηι'), + (0x1FC4, 'M', u'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', u'ῆι'), + (0x1FC8, 'M', u'ὲ'), + (0x1FC9, 'M', u'έ'), + (0x1FCA, 'M', u'ὴ'), + (0x1FCB, 'M', u'ή'), + (0x1FCC, 'M', u'ηι'), + (0x1FCD, '3', u' ̓̀'), + (0x1FCE, '3', u' ̓́'), + (0x1FCF, '3', u' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', u'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', u'ῐ'), + (0x1FD9, 'M', u'ῑ'), + (0x1FDA, 'M', u'ὶ'), + (0x1FDB, 'M', u'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', u' ̔̀'), + (0x1FDE, '3', u' ̔́'), + (0x1FDF, '3', u' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', u'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', u'ῠ'), + (0x1FE9, 'M', u'ῡ'), + (0x1FEA, 'M', u'ὺ'), + (0x1FEB, 'M', u'ύ'), + (0x1FEC, 'M', u'ῥ'), + (0x1FED, '3', u' ̈̀'), + (0x1FEE, '3', u' ̈́'), + (0x1FEF, '3', u'`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), + (0x1FF4, 'M', u'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', u'ῶι'), + (0x1FF8, 'M', u'ὸ'), + (0x1FF9, 'M', u'ό'), + (0x1FFA, 'M', u'ὼ'), + (0x1FFB, 'M', u'ώ'), + (0x1FFC, 'M', u'ωι'), + (0x1FFD, '3', u' ́'), + (0x1FFE, '3', u' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', u' '), + (0x200B, 'I'), + (0x200C, 'D', u''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', u'‐'), + (0x2012, 'V'), + (0x2017, '3', u' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), 
+ (0x202F, '3', u' '), + (0x2030, 'V'), + (0x2033, 'M', u'′′'), + (0x2034, 'M', u'′′′'), + (0x2035, 'V'), + (0x2036, 'M', u'‵‵'), + (0x2037, 'M', u'‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', u'!!'), + (0x203D, 'V'), + (0x203E, '3', u' ̅'), + (0x203F, 'V'), + (0x2047, '3', u'??'), + (0x2048, '3', u'?!'), + (0x2049, '3', u'!?'), + (0x204A, 'V'), + (0x2057, 'M', u'′′′′'), + (0x2058, 'V'), + ] + +def _seg_21(): + return [ + (0x205F, '3', u' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', u'0'), + (0x2071, 'M', u'i'), + (0x2072, 'X'), + (0x2074, 'M', u'4'), + (0x2075, 'M', u'5'), + (0x2076, 'M', u'6'), + (0x2077, 'M', u'7'), + (0x2078, 'M', u'8'), + (0x2079, 'M', u'9'), + (0x207A, '3', u'+'), + (0x207B, 'M', u'−'), + (0x207C, '3', u'='), + (0x207D, '3', u'('), + (0x207E, '3', u')'), + (0x207F, 'M', u'n'), + (0x2080, 'M', u'0'), + (0x2081, 'M', u'1'), + (0x2082, 'M', u'2'), + (0x2083, 'M', u'3'), + (0x2084, 'M', u'4'), + (0x2085, 'M', u'5'), + (0x2086, 'M', u'6'), + (0x2087, 'M', u'7'), + (0x2088, 'M', u'8'), + (0x2089, 'M', u'9'), + (0x208A, '3', u'+'), + (0x208B, 'M', u'−'), + (0x208C, '3', u'='), + (0x208D, '3', u'('), + (0x208E, '3', u')'), + (0x208F, 'X'), + (0x2090, 'M', u'a'), + (0x2091, 'M', u'e'), + (0x2092, 'M', u'o'), + (0x2093, 'M', u'x'), + (0x2094, 'M', u'ə'), + (0x2095, 'M', u'h'), + (0x2096, 'M', u'k'), + (0x2097, 'M', u'l'), + (0x2098, 'M', u'm'), + (0x2099, 'M', u'n'), + (0x209A, 'M', u'p'), + (0x209B, 'M', u's'), + (0x209C, 'M', u't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', u'rs'), + (0x20A9, 'V'), + (0x20BB, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), + (0x2102, 'M', u'c'), + (0x2103, 'M', u'°c'), + (0x2104, 'V'), + (0x2105, '3', u'c/o'), + (0x2106, '3', u'c/u'), + (0x2107, 'M', u'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', u'°f'), + (0x210A, 'M', u'g'), + (0x210B, 'M', u'h'), + (0x210F, 'M', u'ħ'), + (0x2110, 'M', u'i'), + (0x2112, 'M', u'l'), + (0x2114, 'V'), + (0x2115, 'M', u'n'), + (0x2116, 'M', u'no'), + (0x2117, 'V'), + (0x2119, 'M', u'p'), + (0x211A, 'M', u'q'), + (0x211B, 'M', u'r'), + (0x211E, 'V'), + (0x2120, 'M', u'sm'), + (0x2121, 'M', u'tel'), + (0x2122, 'M', u'tm'), + (0x2123, 'V'), + (0x2124, 'M', u'z'), + (0x2125, 'V'), + (0x2126, 'M', u'ω'), + (0x2127, 'V'), + (0x2128, 'M', u'z'), + (0x2129, 'V'), + (0x212A, 'M', u'k'), + (0x212B, 'M', u'å'), + (0x212C, 'M', u'b'), + (0x212D, 'M', u'c'), + (0x212E, 'V'), + (0x212F, 'M', u'e'), + (0x2131, 'M', u'f'), + (0x2132, 'X'), + (0x2133, 'M', u'm'), + (0x2134, 'M', u'o'), + (0x2135, 'M', u'א'), + ] + +def _seg_22(): + return [ + (0x2136, 'M', u'ב'), + (0x2137, 'M', u'ג'), + (0x2138, 'M', u'ד'), + (0x2139, 'M', u'i'), + (0x213A, 'V'), + (0x213B, 'M', u'fax'), + (0x213C, 'M', u'π'), + (0x213D, 'M', u'γ'), + (0x213F, 'M', u'π'), + (0x2140, 'M', u'∑'), + (0x2141, 'V'), + (0x2145, 'M', u'd'), + (0x2147, 'M', u'e'), + (0x2148, 'M', u'i'), + (0x2149, 'M', u'j'), + (0x214A, 'V'), + (0x2150, 'M', u'1⁄7'), + (0x2151, 'M', u'1⁄9'), + (0x2152, 'M', u'1⁄10'), + (0x2153, 'M', u'1⁄3'), + (0x2154, 'M', u'2⁄3'), + (0x2155, 'M', u'1⁄5'), + (0x2156, 'M', u'2⁄5'), + (0x2157, 'M', u'3⁄5'), + (0x2158, 'M', u'4⁄5'), + (0x2159, 'M', u'1⁄6'), + (0x215A, 'M', u'5⁄6'), + (0x215B, 'M', u'1⁄8'), + (0x215C, 'M', u'3⁄8'), + (0x215D, 'M', u'5⁄8'), + (0x215E, 'M', u'7⁄8'), + (0x215F, 'M', u'1⁄'), + (0x2160, 'M', u'i'), + (0x2161, 'M', u'ii'), + (0x2162, 'M', u'iii'), + (0x2163, 'M', u'iv'), + (0x2164, 'M', u'v'), + (0x2165, 'M', u'vi'), + (0x2166, 'M', u'vii'), + 
(0x2167, 'M', u'viii'), + (0x2168, 'M', u'ix'), + (0x2169, 'M', u'x'), + (0x216A, 'M', u'xi'), + (0x216B, 'M', u'xii'), + (0x216C, 'M', u'l'), + (0x216D, 'M', u'c'), + (0x216E, 'M', u'd'), + (0x216F, 'M', u'm'), + (0x2170, 'M', u'i'), + (0x2171, 'M', u'ii'), + (0x2172, 'M', u'iii'), + (0x2173, 'M', u'iv'), + (0x2174, 'M', u'v'), + (0x2175, 'M', u'vi'), + (0x2176, 'M', u'vii'), + (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), + (0x217A, 'M', u'xi'), + (0x217B, 'M', u'xii'), + (0x217C, 'M', u'l'), + (0x217D, 'M', u'c'), + (0x217E, 'M', u'd'), + (0x217F, 'M', u'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', u'0⁄3'), + (0x218A, 'X'), + (0x2190, 'V'), + (0x222C, 'M', u'∫∫'), + (0x222D, 'M', u'∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', u'∮∮'), + (0x2230, 'M', u'∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', u'〈'), + (0x232A, 'M', u'〉'), + (0x232B, 'V'), + (0x23F4, 'X'), + (0x2400, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', u'1'), + (0x2461, 'M', u'2'), + (0x2462, 'M', u'3'), + (0x2463, 'M', u'4'), + (0x2464, 'M', u'5'), + (0x2465, 'M', u'6'), + (0x2466, 'M', u'7'), + (0x2467, 'M', u'8'), + (0x2468, 'M', u'9'), + (0x2469, 'M', u'10'), + (0x246A, 'M', u'11'), + (0x246B, 'M', u'12'), + ] + +def _seg_23(): + return [ + (0x246C, 'M', u'13'), + (0x246D, 'M', u'14'), + (0x246E, 'M', u'15'), + (0x246F, 'M', u'16'), + (0x2470, 'M', u'17'), + (0x2471, 'M', u'18'), + (0x2472, 'M', u'19'), + (0x2473, 'M', u'20'), + (0x2474, '3', u'(1)'), + (0x2475, '3', u'(2)'), + (0x2476, '3', u'(3)'), + (0x2477, '3', u'(4)'), + (0x2478, '3', u'(5)'), + (0x2479, '3', u'(6)'), + (0x247A, '3', u'(7)'), + (0x247B, '3', u'(8)'), + (0x247C, '3', u'(9)'), + (0x247D, '3', u'(10)'), + (0x247E, '3', u'(11)'), + (0x247F, '3', u'(12)'), + (0x2480, '3', u'(13)'), + (0x2481, '3', u'(14)'), + (0x2482, '3', u'(15)'), + (0x2483, '3', u'(16)'), + (0x2484, '3', u'(17)'), + (0x2485, '3', u'(18)'), + (0x2486, '3', u'(19)'), + (0x2487, '3', u'(20)'), + (0x2488, 'X'), + (0x249C, '3', u'(a)'), + (0x249D, '3', u'(b)'), + (0x249E, '3', u'(c)'), + (0x249F, '3', u'(d)'), + (0x24A0, '3', u'(e)'), + (0x24A1, '3', u'(f)'), + (0x24A2, '3', u'(g)'), + (0x24A3, '3', u'(h)'), + (0x24A4, '3', u'(i)'), + (0x24A5, '3', u'(j)'), + (0x24A6, '3', u'(k)'), + (0x24A7, '3', u'(l)'), + (0x24A8, '3', u'(m)'), + (0x24A9, '3', u'(n)'), + (0x24AA, '3', u'(o)'), + (0x24AB, '3', u'(p)'), + (0x24AC, '3', u'(q)'), + (0x24AD, '3', u'(r)'), + (0x24AE, '3', u'(s)'), + (0x24AF, '3', u'(t)'), + (0x24B0, '3', u'(u)'), + (0x24B1, '3', u'(v)'), + (0x24B2, '3', u'(w)'), + (0x24B3, '3', u'(x)'), + (0x24B4, '3', u'(y)'), + (0x24B5, '3', u'(z)'), + (0x24B6, 'M', u'a'), + (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), + (0x24BA, 'M', u'e'), + (0x24BB, 'M', u'f'), + (0x24BC, 'M', u'g'), + (0x24BD, 'M', u'h'), + (0x24BE, 'M', u'i'), + (0x24BF, 'M', u'j'), + (0x24C0, 'M', u'k'), + (0x24C1, 'M', u'l'), + (0x24C2, 'M', u'm'), + (0x24C3, 'M', u'n'), + (0x24C4, 'M', u'o'), + (0x24C5, 'M', u'p'), + (0x24C6, 'M', u'q'), + (0x24C7, 'M', u'r'), + (0x24C8, 'M', u's'), + (0x24C9, 'M', u't'), + (0x24CA, 'M', u'u'), + (0x24CB, 'M', u'v'), + (0x24CC, 'M', u'w'), + (0x24CD, 'M', u'x'), + (0x24CE, 'M', u'y'), + (0x24CF, 'M', u'z'), + (0x24D0, 'M', u'a'), + (0x24D1, 'M', u'b'), + (0x24D2, 'M', u'c'), + (0x24D3, 'M', u'd'), + (0x24D4, 'M', u'e'), + (0x24D5, 'M', u'f'), + (0x24D6, 'M', u'g'), + (0x24D7, 'M', u'h'), + (0x24D8, 'M', u'i'), + (0x24D9, 'M', 
u'j'), + (0x24DA, 'M', u'k'), + (0x24DB, 'M', u'l'), + (0x24DC, 'M', u'm'), + (0x24DD, 'M', u'n'), + (0x24DE, 'M', u'o'), + (0x24DF, 'M', u'p'), + (0x24E0, 'M', u'q'), + (0x24E1, 'M', u'r'), + (0x24E2, 'M', u's'), + ] + +def _seg_24(): + return [ + (0x24E3, 'M', u't'), + (0x24E4, 'M', u'u'), + (0x24E5, 'M', u'v'), + (0x24E6, 'M', u'w'), + (0x24E7, 'M', u'x'), + (0x24E8, 'M', u'y'), + (0x24E9, 'M', u'z'), + (0x24EA, 'M', u'0'), + (0x24EB, 'V'), + (0x2700, 'X'), + (0x2701, 'V'), + (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', u'::='), + (0x2A75, '3', u'=='), + (0x2A76, '3', u'==='), + (0x2A77, 'V'), + (0x2ADC, 'M', u'⫝̸'), + (0x2ADD, 'V'), + (0x2B4D, 'X'), + (0x2B50, 'V'), + (0x2B5A, 'X'), + (0x2C00, 'M', u'ⰰ'), + (0x2C01, 'M', u'ⰱ'), + (0x2C02, 'M', u'ⰲ'), + (0x2C03, 'M', u'ⰳ'), + (0x2C04, 'M', u'ⰴ'), + (0x2C05, 'M', u'ⰵ'), + (0x2C06, 'M', u'ⰶ'), + (0x2C07, 'M', u'ⰷ'), + (0x2C08, 'M', u'ⰸ'), + (0x2C09, 'M', u'ⰹ'), + (0x2C0A, 'M', u'ⰺ'), + (0x2C0B, 'M', u'ⰻ'), + (0x2C0C, 'M', u'ⰼ'), + (0x2C0D, 'M', u'ⰽ'), + (0x2C0E, 'M', u'ⰾ'), + (0x2C0F, 'M', u'ⰿ'), + (0x2C10, 'M', u'ⱀ'), + (0x2C11, 'M', u'ⱁ'), + (0x2C12, 'M', u'ⱂ'), + (0x2C13, 'M', u'ⱃ'), + (0x2C14, 'M', u'ⱄ'), + (0x2C15, 'M', u'ⱅ'), + (0x2C16, 'M', u'ⱆ'), + (0x2C17, 'M', u'ⱇ'), + (0x2C18, 'M', u'ⱈ'), + (0x2C19, 'M', u'ⱉ'), + (0x2C1A, 'M', u'ⱊ'), + (0x2C1B, 'M', u'ⱋ'), + (0x2C1C, 'M', u'ⱌ'), + (0x2C1D, 'M', u'ⱍ'), + (0x2C1E, 'M', u'ⱎ'), + (0x2C1F, 'M', u'ⱏ'), + (0x2C20, 'M', u'ⱐ'), + (0x2C21, 'M', u'ⱑ'), + (0x2C22, 'M', u'ⱒ'), + (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'ⱔ'), + (0x2C25, 'M', u'ⱕ'), + (0x2C26, 'M', u'ⱖ'), + (0x2C27, 'M', u'ⱗ'), + (0x2C28, 'M', u'ⱘ'), + (0x2C29, 'M', u'ⱙ'), + (0x2C2A, 'M', u'ⱚ'), + (0x2C2B, 'M', u'ⱛ'), + (0x2C2C, 'M', u'ⱜ'), + (0x2C2D, 'M', u'ⱝ'), + (0x2C2E, 'M', u'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', u'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', u'ɫ'), + (0x2C63, 'M', u'ᵽ'), + (0x2C64, 'M', u'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', u'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', u'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', u'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', u'ɑ'), + (0x2C6E, 'M', u'ɱ'), + (0x2C6F, 'M', u'ɐ'), + (0x2C70, 'M', u'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', u'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', u'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', u'j'), + (0x2C7D, 'M', u'v'), + (0x2C7E, 'M', u'ȿ'), + (0x2C7F, 'M', u'ɀ'), + (0x2C80, 'M', u'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', u'ⲃ'), + ] + +def _seg_25(): + return [ + (0x2C83, 'V'), + (0x2C84, 'M', u'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', u'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', u'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', u'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', u'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', u'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', u'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', u'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', u'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', u'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', u'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', u'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', u'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', u'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', u'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', u'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', u'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', u'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', u'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', u'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', u'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', u'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', u'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', u'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', u'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', u'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', u'ⲹ'), + (0x2CB9, 
'V'), + (0x2CBA, 'M', u'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', u'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', u'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', u'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', u'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', u'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', u'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', u'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', u'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', u'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', u'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', u'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', u'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', u'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', u'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', u'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', u'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', u'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', u'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', u'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', u'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', u'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', u'ⳮ'), + ] + +def _seg_26(): + return [ + (0x2CEE, 'V'), + (0x2CF2, 'M', u'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', u'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E3C, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', u'母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', u'龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', u'一'), + (0x2F01, 'M', u'丨'), + (0x2F02, 'M', u'丶'), + (0x2F03, 'M', u'丿'), + (0x2F04, 'M', u'乙'), + (0x2F05, 'M', u'亅'), + (0x2F06, 'M', u'二'), + (0x2F07, 'M', u'亠'), + (0x2F08, 'M', u'人'), + (0x2F09, 'M', u'儿'), + (0x2F0A, 'M', u'入'), + (0x2F0B, 'M', u'八'), + (0x2F0C, 'M', u'冂'), + (0x2F0D, 'M', u'冖'), + (0x2F0E, 'M', u'冫'), + (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), + (0x2F12, 'M', u'力'), + (0x2F13, 'M', u'勹'), + (0x2F14, 'M', u'匕'), + (0x2F15, 'M', u'匚'), + (0x2F16, 'M', u'匸'), + (0x2F17, 'M', u'十'), + (0x2F18, 'M', u'卜'), + (0x2F19, 'M', u'卩'), + (0x2F1A, 'M', u'厂'), + (0x2F1B, 'M', u'厶'), + (0x2F1C, 'M', u'又'), + (0x2F1D, 'M', u'口'), + (0x2F1E, 'M', u'囗'), + (0x2F1F, 'M', u'土'), + (0x2F20, 'M', u'士'), + (0x2F21, 'M', u'夂'), + (0x2F22, 'M', u'夊'), + (0x2F23, 'M', u'夕'), + (0x2F24, 'M', u'大'), + (0x2F25, 'M', u'女'), + (0x2F26, 'M', u'子'), + (0x2F27, 'M', u'宀'), + (0x2F28, 'M', u'寸'), + (0x2F29, 'M', u'小'), + (0x2F2A, 'M', u'尢'), + (0x2F2B, 'M', u'尸'), + (0x2F2C, 'M', u'屮'), + (0x2F2D, 'M', u'山'), + (0x2F2E, 'M', u'巛'), + (0x2F2F, 'M', u'工'), + (0x2F30, 'M', u'己'), + (0x2F31, 'M', u'巾'), + (0x2F32, 'M', u'干'), + (0x2F33, 'M', u'幺'), + (0x2F34, 'M', u'广'), + (0x2F35, 'M', u'廴'), + (0x2F36, 'M', u'廾'), + (0x2F37, 'M', u'弋'), + (0x2F38, 'M', u'弓'), + (0x2F39, 'M', u'彐'), + ] + +def _seg_27(): + return [ + (0x2F3A, 'M', u'彡'), + (0x2F3B, 'M', u'彳'), + (0x2F3C, 'M', u'心'), + (0x2F3D, 'M', u'戈'), + (0x2F3E, 'M', u'戶'), + (0x2F3F, 'M', u'手'), + (0x2F40, 'M', u'支'), + (0x2F41, 'M', u'攴'), + (0x2F42, 'M', u'文'), + (0x2F43, 'M', u'斗'), + (0x2F44, 'M', u'斤'), + (0x2F45, 'M', u'方'), + (0x2F46, 'M', u'无'), + (0x2F47, 'M', u'日'), + (0x2F48, 'M', u'曰'), + (0x2F49, 'M', u'月'), + (0x2F4A, 'M', u'木'), + (0x2F4B, 'M', u'欠'), + (0x2F4C, 'M', u'止'), + (0x2F4D, 'M', u'歹'), + (0x2F4E, 'M', u'殳'), + (0x2F4F, 'M', u'毋'), + 
(0x2F50, 'M', u'比'), + (0x2F51, 'M', u'毛'), + (0x2F52, 'M', u'氏'), + (0x2F53, 'M', u'气'), + (0x2F54, 'M', u'水'), + (0x2F55, 'M', u'火'), + (0x2F56, 'M', u'爪'), + (0x2F57, 'M', u'父'), + (0x2F58, 'M', u'爻'), + (0x2F59, 'M', u'爿'), + (0x2F5A, 'M', u'片'), + (0x2F5B, 'M', u'牙'), + (0x2F5C, 'M', u'牛'), + (0x2F5D, 'M', u'犬'), + (0x2F5E, 'M', u'玄'), + (0x2F5F, 'M', u'玉'), + (0x2F60, 'M', u'瓜'), + (0x2F61, 'M', u'瓦'), + (0x2F62, 'M', u'甘'), + (0x2F63, 'M', u'生'), + (0x2F64, 'M', u'用'), + (0x2F65, 'M', u'田'), + (0x2F66, 'M', u'疋'), + (0x2F67, 'M', u'疒'), + (0x2F68, 'M', u'癶'), + (0x2F69, 'M', u'白'), + (0x2F6A, 'M', u'皮'), + (0x2F6B, 'M', u'皿'), + (0x2F6C, 'M', u'目'), + (0x2F6D, 'M', u'矛'), + (0x2F6E, 'M', u'矢'), + (0x2F6F, 'M', u'石'), + (0x2F70, 'M', u'示'), + (0x2F71, 'M', u'禸'), + (0x2F72, 'M', u'禾'), + (0x2F73, 'M', u'穴'), + (0x2F74, 'M', u'立'), + (0x2F75, 'M', u'竹'), + (0x2F76, 'M', u'米'), + (0x2F77, 'M', u'糸'), + (0x2F78, 'M', u'缶'), + (0x2F79, 'M', u'网'), + (0x2F7A, 'M', u'羊'), + (0x2F7B, 'M', u'羽'), + (0x2F7C, 'M', u'老'), + (0x2F7D, 'M', u'而'), + (0x2F7E, 'M', u'耒'), + (0x2F7F, 'M', u'耳'), + (0x2F80, 'M', u'聿'), + (0x2F81, 'M', u'肉'), + (0x2F82, 'M', u'臣'), + (0x2F83, 'M', u'自'), + (0x2F84, 'M', u'至'), + (0x2F85, 'M', u'臼'), + (0x2F86, 'M', u'舌'), + (0x2F87, 'M', u'舛'), + (0x2F88, 'M', u'舟'), + (0x2F89, 'M', u'艮'), + (0x2F8A, 'M', u'色'), + (0x2F8B, 'M', u'艸'), + (0x2F8C, 'M', u'虍'), + (0x2F8D, 'M', u'虫'), + (0x2F8E, 'M', u'血'), + (0x2F8F, 'M', u'行'), + (0x2F90, 'M', u'衣'), + (0x2F91, 'M', u'襾'), + (0x2F92, 'M', u'見'), + (0x2F93, 'M', u'角'), + (0x2F94, 'M', u'言'), + (0x2F95, 'M', u'谷'), + (0x2F96, 'M', u'豆'), + (0x2F97, 'M', u'豕'), + (0x2F98, 'M', u'豸'), + (0x2F99, 'M', u'貝'), + (0x2F9A, 'M', u'赤'), + (0x2F9B, 'M', u'走'), + (0x2F9C, 'M', u'足'), + (0x2F9D, 'M', u'身'), + ] + +def _seg_28(): + return [ + (0x2F9E, 'M', u'車'), + (0x2F9F, 'M', u'辛'), + (0x2FA0, 'M', u'辰'), + (0x2FA1, 'M', u'辵'), + (0x2FA2, 'M', u'邑'), + (0x2FA3, 'M', u'酉'), + (0x2FA4, 'M', u'釆'), + (0x2FA5, 'M', u'里'), + (0x2FA6, 'M', u'金'), + (0x2FA7, 'M', u'長'), + (0x2FA8, 'M', u'門'), + (0x2FA9, 'M', u'阜'), + (0x2FAA, 'M', u'隶'), + (0x2FAB, 'M', u'隹'), + (0x2FAC, 'M', u'雨'), + (0x2FAD, 'M', u'靑'), + (0x2FAE, 'M', u'非'), + (0x2FAF, 'M', u'面'), + (0x2FB0, 'M', u'革'), + (0x2FB1, 'M', u'韋'), + (0x2FB2, 'M', u'韭'), + (0x2FB3, 'M', u'音'), + (0x2FB4, 'M', u'頁'), + (0x2FB5, 'M', u'風'), + (0x2FB6, 'M', u'飛'), + (0x2FB7, 'M', u'食'), + (0x2FB8, 'M', u'首'), + (0x2FB9, 'M', u'香'), + (0x2FBA, 'M', u'馬'), + (0x2FBB, 'M', u'骨'), + (0x2FBC, 'M', u'高'), + (0x2FBD, 'M', u'髟'), + (0x2FBE, 'M', u'鬥'), + (0x2FBF, 'M', u'鬯'), + (0x2FC0, 'M', u'鬲'), + (0x2FC1, 'M', u'鬼'), + (0x2FC2, 'M', u'魚'), + (0x2FC3, 'M', u'鳥'), + (0x2FC4, 'M', u'鹵'), + (0x2FC5, 'M', u'鹿'), + (0x2FC6, 'M', u'麥'), + (0x2FC7, 'M', u'麻'), + (0x2FC8, 'M', u'黃'), + (0x2FC9, 'M', u'黍'), + (0x2FCA, 'M', u'黑'), + (0x2FCB, 'M', u'黹'), + (0x2FCC, 'M', u'黽'), + (0x2FCD, 'M', u'鼎'), + (0x2FCE, 'M', u'鼓'), + (0x2FCF, 'M', u'鼠'), + (0x2FD0, 'M', u'鼻'), + (0x2FD1, 'M', u'齊'), + (0x2FD2, 'M', u'齒'), + (0x2FD3, 'M', u'龍'), + (0x2FD4, 'M', u'龜'), + (0x2FD5, 'M', u'龠'), + (0x2FD6, 'X'), + (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), + (0x3003, 'V'), + (0x3036, 'M', u'〒'), + (0x3037, 'V'), + (0x3038, 'M', u'十'), + (0x3039, 'M', u'卄'), + (0x303A, 'M', u'卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', u' ゙'), + (0x309C, '3', u' ゚'), + (0x309D, 'V'), + (0x309F, 'M', u'より'), + (0x30A0, 'V'), + (0x30FF, 'M', u'コト'), + (0x3100, 'X'), + 
(0x3105, 'V'), + (0x312E, 'X'), + (0x3131, 'M', u'ᄀ'), + (0x3132, 'M', u'ᄁ'), + (0x3133, 'M', u'ᆪ'), + (0x3134, 'M', u'ᄂ'), + (0x3135, 'M', u'ᆬ'), + (0x3136, 'M', u'ᆭ'), + (0x3137, 'M', u'ᄃ'), + (0x3138, 'M', u'ᄄ'), + (0x3139, 'M', u'ᄅ'), + (0x313A, 'M', u'ᆰ'), + (0x313B, 'M', u'ᆱ'), + (0x313C, 'M', u'ᆲ'), + (0x313D, 'M', u'ᆳ'), + (0x313E, 'M', u'ᆴ'), + (0x313F, 'M', u'ᆵ'), + (0x3140, 'M', u'ᄚ'), + (0x3141, 'M', u'ᄆ'), + (0x3142, 'M', u'ᄇ'), + (0x3143, 'M', u'ᄈ'), + (0x3144, 'M', u'ᄡ'), + ] + +def _seg_29(): + return [ + (0x3145, 'M', u'ᄉ'), + (0x3146, 'M', u'ᄊ'), + (0x3147, 'M', u'ᄋ'), + (0x3148, 'M', u'ᄌ'), + (0x3149, 'M', u'ᄍ'), + (0x314A, 'M', u'ᄎ'), + (0x314B, 'M', u'ᄏ'), + (0x314C, 'M', u'ᄐ'), + (0x314D, 'M', u'ᄑ'), + (0x314E, 'M', u'ᄒ'), + (0x314F, 'M', u'ᅡ'), + (0x3150, 'M', u'ᅢ'), + (0x3151, 'M', u'ᅣ'), + (0x3152, 'M', u'ᅤ'), + (0x3153, 'M', u'ᅥ'), + (0x3154, 'M', u'ᅦ'), + (0x3155, 'M', u'ᅧ'), + (0x3156, 'M', u'ᅨ'), + (0x3157, 'M', u'ᅩ'), + (0x3158, 'M', u'ᅪ'), + (0x3159, 'M', u'ᅫ'), + (0x315A, 'M', u'ᅬ'), + (0x315B, 'M', u'ᅭ'), + (0x315C, 'M', u'ᅮ'), + (0x315D, 'M', u'ᅯ'), + (0x315E, 'M', u'ᅰ'), + (0x315F, 'M', u'ᅱ'), + (0x3160, 'M', u'ᅲ'), + (0x3161, 'M', u'ᅳ'), + (0x3162, 'M', u'ᅴ'), + (0x3163, 'M', u'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', u'ᄔ'), + (0x3166, 'M', u'ᄕ'), + (0x3167, 'M', u'ᇇ'), + (0x3168, 'M', u'ᇈ'), + (0x3169, 'M', u'ᇌ'), + (0x316A, 'M', u'ᇎ'), + (0x316B, 'M', u'ᇓ'), + (0x316C, 'M', u'ᇗ'), + (0x316D, 'M', u'ᇙ'), + (0x316E, 'M', u'ᄜ'), + (0x316F, 'M', u'ᇝ'), + (0x3170, 'M', u'ᇟ'), + (0x3171, 'M', u'ᄝ'), + (0x3172, 'M', u'ᄞ'), + (0x3173, 'M', u'ᄠ'), + (0x3174, 'M', u'ᄢ'), + (0x3175, 'M', u'ᄣ'), + (0x3176, 'M', u'ᄧ'), + (0x3177, 'M', u'ᄩ'), + (0x3178, 'M', u'ᄫ'), + (0x3179, 'M', u'ᄬ'), + (0x317A, 'M', u'ᄭ'), + (0x317B, 'M', u'ᄮ'), + (0x317C, 'M', u'ᄯ'), + (0x317D, 'M', u'ᄲ'), + (0x317E, 'M', u'ᄶ'), + (0x317F, 'M', u'ᅀ'), + (0x3180, 'M', u'ᅇ'), + (0x3181, 'M', u'ᅌ'), + (0x3182, 'M', u'ᇱ'), + (0x3183, 'M', u'ᇲ'), + (0x3184, 'M', u'ᅗ'), + (0x3185, 'M', u'ᅘ'), + (0x3186, 'M', u'ᅙ'), + (0x3187, 'M', u'ᆄ'), + (0x3188, 'M', u'ᆅ'), + (0x3189, 'M', u'ᆈ'), + (0x318A, 'M', u'ᆑ'), + (0x318B, 'M', u'ᆒ'), + (0x318C, 'M', u'ᆔ'), + (0x318D, 'M', u'ᆞ'), + (0x318E, 'M', u'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', u'一'), + (0x3193, 'M', u'二'), + (0x3194, 'M', u'三'), + (0x3195, 'M', u'四'), + (0x3196, 'M', u'上'), + (0x3197, 'M', u'中'), + (0x3198, 'M', u'下'), + (0x3199, 'M', u'甲'), + (0x319A, 'M', u'乙'), + (0x319B, 'M', u'丙'), + (0x319C, 'M', u'丁'), + (0x319D, 'M', u'天'), + (0x319E, 'M', u'地'), + (0x319F, 'M', u'人'), + (0x31A0, 'V'), + (0x31BB, 'X'), + (0x31C0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', u'(ᄀ)'), + (0x3201, '3', u'(ᄂ)'), + (0x3202, '3', u'(ᄃ)'), + (0x3203, '3', u'(ᄅ)'), + (0x3204, '3', u'(ᄆ)'), + ] + +def _seg_30(): + return [ + (0x3205, '3', u'(ᄇ)'), + (0x3206, '3', u'(ᄉ)'), + (0x3207, '3', u'(ᄋ)'), + (0x3208, '3', u'(ᄌ)'), + (0x3209, '3', u'(ᄎ)'), + (0x320A, '3', u'(ᄏ)'), + (0x320B, '3', u'(ᄐ)'), + (0x320C, '3', u'(ᄑ)'), + (0x320D, '3', u'(ᄒ)'), + (0x320E, '3', u'(가)'), + (0x320F, '3', u'(나)'), + (0x3210, '3', u'(다)'), + (0x3211, '3', u'(라)'), + (0x3212, '3', u'(마)'), + (0x3213, '3', u'(바)'), + (0x3214, '3', u'(사)'), + (0x3215, '3', u'(아)'), + (0x3216, '3', u'(자)'), + (0x3217, '3', u'(차)'), + (0x3218, '3', u'(카)'), + (0x3219, '3', u'(타)'), + (0x321A, '3', u'(파)'), + (0x321B, '3', u'(하)'), + (0x321C, '3', u'(주)'), + (0x321D, '3', u'(오전)'), + (0x321E, '3', u'(오후)'), + (0x321F, 'X'), + (0x3220, '3', u'(一)'), + (0x3221, '3', u'(二)'), + (0x3222, '3', 
u'(三)'), + (0x3223, '3', u'(四)'), + (0x3224, '3', u'(五)'), + (0x3225, '3', u'(六)'), + (0x3226, '3', u'(七)'), + (0x3227, '3', u'(八)'), + (0x3228, '3', u'(九)'), + (0x3229, '3', u'(十)'), + (0x322A, '3', u'(月)'), + (0x322B, '3', u'(火)'), + (0x322C, '3', u'(水)'), + (0x322D, '3', u'(木)'), + (0x322E, '3', u'(金)'), + (0x322F, '3', u'(土)'), + (0x3230, '3', u'(日)'), + (0x3231, '3', u'(株)'), + (0x3232, '3', u'(有)'), + (0x3233, '3', u'(社)'), + (0x3234, '3', u'(名)'), + (0x3235, '3', u'(特)'), + (0x3236, '3', u'(財)'), + (0x3237, '3', u'(祝)'), + (0x3238, '3', u'(労)'), + (0x3239, '3', u'(代)'), + (0x323A, '3', u'(呼)'), + (0x323B, '3', u'(学)'), + (0x323C, '3', u'(監)'), + (0x323D, '3', u'(企)'), + (0x323E, '3', u'(資)'), + (0x323F, '3', u'(協)'), + (0x3240, '3', u'(祭)'), + (0x3241, '3', u'(休)'), + (0x3242, '3', u'(自)'), + (0x3243, '3', u'(至)'), + (0x3244, 'M', u'問'), + (0x3245, 'M', u'幼'), + (0x3246, 'M', u'文'), + (0x3247, 'M', u'箏'), + (0x3248, 'V'), + (0x3250, 'M', u'pte'), + (0x3251, 'M', u'21'), + (0x3252, 'M', u'22'), + (0x3253, 'M', u'23'), + (0x3254, 'M', u'24'), + (0x3255, 'M', u'25'), + (0x3256, 'M', u'26'), + (0x3257, 'M', u'27'), + (0x3258, 'M', u'28'), + (0x3259, 'M', u'29'), + (0x325A, 'M', u'30'), + (0x325B, 'M', u'31'), + (0x325C, 'M', u'32'), + (0x325D, 'M', u'33'), + (0x325E, 'M', u'34'), + (0x325F, 'M', u'35'), + (0x3260, 'M', u'ᄀ'), + (0x3261, 'M', u'ᄂ'), + (0x3262, 'M', u'ᄃ'), + (0x3263, 'M', u'ᄅ'), + (0x3264, 'M', u'ᄆ'), + (0x3265, 'M', u'ᄇ'), + (0x3266, 'M', u'ᄉ'), + (0x3267, 'M', u'ᄋ'), + (0x3268, 'M', u'ᄌ'), + (0x3269, 'M', u'ᄎ'), + (0x326A, 'M', u'ᄏ'), + (0x326B, 'M', u'ᄐ'), + (0x326C, 'M', u'ᄑ'), + (0x326D, 'M', u'ᄒ'), + (0x326E, 'M', u'가'), + (0x326F, 'M', u'나'), + ] + +def _seg_31(): + return [ + (0x3270, 'M', u'다'), + (0x3271, 'M', u'라'), + (0x3272, 'M', u'마'), + (0x3273, 'M', u'바'), + (0x3274, 'M', u'사'), + (0x3275, 'M', u'아'), + (0x3276, 'M', u'자'), + (0x3277, 'M', u'차'), + (0x3278, 'M', u'카'), + (0x3279, 'M', u'타'), + (0x327A, 'M', u'파'), + (0x327B, 'M', u'하'), + (0x327C, 'M', u'참고'), + (0x327D, 'M', u'주의'), + (0x327E, 'M', u'우'), + (0x327F, 'V'), + (0x3280, 'M', u'一'), + (0x3281, 'M', u'二'), + (0x3282, 'M', u'三'), + (0x3283, 'M', u'四'), + (0x3284, 'M', u'五'), + (0x3285, 'M', u'六'), + (0x3286, 'M', u'七'), + (0x3287, 'M', u'八'), + (0x3288, 'M', u'九'), + (0x3289, 'M', u'十'), + (0x328A, 'M', u'月'), + (0x328B, 'M', u'火'), + (0x328C, 'M', u'水'), + (0x328D, 'M', u'木'), + (0x328E, 'M', u'金'), + (0x328F, 'M', u'土'), + (0x3290, 'M', u'日'), + (0x3291, 'M', u'株'), + (0x3292, 'M', u'有'), + (0x3293, 'M', u'社'), + (0x3294, 'M', u'名'), + (0x3295, 'M', u'特'), + (0x3296, 'M', u'財'), + (0x3297, 'M', u'祝'), + (0x3298, 'M', u'労'), + (0x3299, 'M', u'秘'), + (0x329A, 'M', u'男'), + (0x329B, 'M', u'女'), + (0x329C, 'M', u'適'), + (0x329D, 'M', u'優'), + (0x329E, 'M', u'印'), + (0x329F, 'M', u'注'), + (0x32A0, 'M', u'項'), + (0x32A1, 'M', u'休'), + (0x32A2, 'M', u'写'), + (0x32A3, 'M', u'正'), + (0x32A4, 'M', u'上'), + (0x32A5, 'M', u'中'), + (0x32A6, 'M', u'下'), + (0x32A7, 'M', u'左'), + (0x32A8, 'M', u'右'), + (0x32A9, 'M', u'医'), + (0x32AA, 'M', u'宗'), + (0x32AB, 'M', u'学'), + (0x32AC, 'M', u'監'), + (0x32AD, 'M', u'企'), + (0x32AE, 'M', u'資'), + (0x32AF, 'M', u'協'), + (0x32B0, 'M', u'夜'), + (0x32B1, 'M', u'36'), + (0x32B2, 'M', u'37'), + (0x32B3, 'M', u'38'), + (0x32B4, 'M', u'39'), + (0x32B5, 'M', u'40'), + (0x32B6, 'M', u'41'), + (0x32B7, 'M', u'42'), + (0x32B8, 'M', u'43'), + (0x32B9, 'M', u'44'), + (0x32BA, 'M', u'45'), + (0x32BB, 'M', u'46'), + (0x32BC, 'M', u'47'), + (0x32BD, 'M', u'48'), + (0x32BE, 'M', 
u'49'), + (0x32BF, 'M', u'50'), + (0x32C0, 'M', u'1月'), + (0x32C1, 'M', u'2月'), + (0x32C2, 'M', u'3月'), + (0x32C3, 'M', u'4月'), + (0x32C4, 'M', u'5月'), + (0x32C5, 'M', u'6月'), + (0x32C6, 'M', u'7月'), + (0x32C7, 'M', u'8月'), + (0x32C8, 'M', u'9月'), + (0x32C9, 'M', u'10月'), + (0x32CA, 'M', u'11月'), + (0x32CB, 'M', u'12月'), + (0x32CC, 'M', u'hg'), + (0x32CD, 'M', u'erg'), + (0x32CE, 'M', u'ev'), + (0x32CF, 'M', u'ltd'), + (0x32D0, 'M', u'ア'), + (0x32D1, 'M', u'イ'), + (0x32D2, 'M', u'ウ'), + (0x32D3, 'M', u'エ'), + ] + +def _seg_32(): + return [ + (0x32D4, 'M', u'オ'), + (0x32D5, 'M', u'カ'), + (0x32D6, 'M', u'キ'), + (0x32D7, 'M', u'ク'), + (0x32D8, 'M', u'ケ'), + (0x32D9, 'M', u'コ'), + (0x32DA, 'M', u'サ'), + (0x32DB, 'M', u'シ'), + (0x32DC, 'M', u'ス'), + (0x32DD, 'M', u'セ'), + (0x32DE, 'M', u'ソ'), + (0x32DF, 'M', u'タ'), + (0x32E0, 'M', u'チ'), + (0x32E1, 'M', u'ツ'), + (0x32E2, 'M', u'テ'), + (0x32E3, 'M', u'ト'), + (0x32E4, 'M', u'ナ'), + (0x32E5, 'M', u'ニ'), + (0x32E6, 'M', u'ヌ'), + (0x32E7, 'M', u'ネ'), + (0x32E8, 'M', u'ノ'), + (0x32E9, 'M', u'ハ'), + (0x32EA, 'M', u'ヒ'), + (0x32EB, 'M', u'フ'), + (0x32EC, 'M', u'ヘ'), + (0x32ED, 'M', u'ホ'), + (0x32EE, 'M', u'マ'), + (0x32EF, 'M', u'ミ'), + (0x32F0, 'M', u'ム'), + (0x32F1, 'M', u'メ'), + (0x32F2, 'M', u'モ'), + (0x32F3, 'M', u'ヤ'), + (0x32F4, 'M', u'ユ'), + (0x32F5, 'M', u'ヨ'), + (0x32F6, 'M', u'ラ'), + (0x32F7, 'M', u'リ'), + (0x32F8, 'M', u'ル'), + (0x32F9, 'M', u'レ'), + (0x32FA, 'M', u'ロ'), + (0x32FB, 'M', u'ワ'), + (0x32FC, 'M', u'ヰ'), + (0x32FD, 'M', u'ヱ'), + (0x32FE, 'M', u'ヲ'), + (0x32FF, 'X'), + (0x3300, 'M', u'アパート'), + (0x3301, 'M', u'アルファ'), + (0x3302, 'M', u'アンペア'), + (0x3303, 'M', u'アール'), + (0x3304, 'M', u'イニング'), + (0x3305, 'M', u'インチ'), + (0x3306, 'M', u'ウォン'), + (0x3307, 'M', u'エスクード'), + (0x3308, 'M', u'エーカー'), + (0x3309, 'M', u'オンス'), + (0x330A, 'M', u'オーム'), + (0x330B, 'M', u'カイリ'), + (0x330C, 'M', u'カラット'), + (0x330D, 'M', u'カロリー'), + (0x330E, 'M', u'ガロン'), + (0x330F, 'M', u'ガンマ'), + (0x3310, 'M', u'ギガ'), + (0x3311, 'M', u'ギニー'), + (0x3312, 'M', u'キュリー'), + (0x3313, 'M', u'ギルダー'), + (0x3314, 'M', u'キロ'), + (0x3315, 'M', u'キログラム'), + (0x3316, 'M', u'キロメートル'), + (0x3317, 'M', u'キロワット'), + (0x3318, 'M', u'グラム'), + (0x3319, 'M', u'グラムトン'), + (0x331A, 'M', u'クルゼイロ'), + (0x331B, 'M', u'クローネ'), + (0x331C, 'M', u'ケース'), + (0x331D, 'M', u'コルナ'), + (0x331E, 'M', u'コーポ'), + (0x331F, 'M', u'サイクル'), + (0x3320, 'M', u'サンチーム'), + (0x3321, 'M', u'シリング'), + (0x3322, 'M', u'センチ'), + (0x3323, 'M', u'セント'), + (0x3324, 'M', u'ダース'), + (0x3325, 'M', u'デシ'), + (0x3326, 'M', u'ドル'), + (0x3327, 'M', u'トン'), + (0x3328, 'M', u'ナノ'), + (0x3329, 'M', u'ノット'), + (0x332A, 'M', u'ハイツ'), + (0x332B, 'M', u'パーセント'), + (0x332C, 'M', u'パーツ'), + (0x332D, 'M', u'バーレル'), + (0x332E, 'M', u'ピアストル'), + (0x332F, 'M', u'ピクル'), + (0x3330, 'M', u'ピコ'), + (0x3331, 'M', u'ビル'), + (0x3332, 'M', u'ファラッド'), + (0x3333, 'M', u'フィート'), + (0x3334, 'M', u'ブッシェル'), + (0x3335, 'M', u'フラン'), + (0x3336, 'M', u'ヘクタール'), + (0x3337, 'M', u'ペソ'), + ] + +def _seg_33(): + return [ + (0x3338, 'M', u'ペニヒ'), + (0x3339, 'M', u'ヘルツ'), + (0x333A, 'M', u'ペンス'), + (0x333B, 'M', u'ページ'), + (0x333C, 'M', u'ベータ'), + (0x333D, 'M', u'ポイント'), + (0x333E, 'M', u'ボルト'), + (0x333F, 'M', u'ホン'), + (0x3340, 'M', u'ポンド'), + (0x3341, 'M', u'ホール'), + (0x3342, 'M', u'ホーン'), + (0x3343, 'M', u'マイクロ'), + (0x3344, 'M', u'マイル'), + (0x3345, 'M', u'マッハ'), + (0x3346, 'M', u'マルク'), + (0x3347, 'M', u'マンション'), + (0x3348, 'M', u'ミクロン'), + (0x3349, 'M', u'ミリ'), + (0x334A, 'M', u'ミリバール'), + (0x334B, 'M', u'メガ'), + (0x334C, 'M', u'メガトン'), + 
(0x334D, 'M', u'メートル'), + (0x334E, 'M', u'ヤード'), + (0x334F, 'M', u'ヤール'), + (0x3350, 'M', u'ユアン'), + (0x3351, 'M', u'リットル'), + (0x3352, 'M', u'リラ'), + (0x3353, 'M', u'ルピー'), + (0x3354, 'M', u'ルーブル'), + (0x3355, 'M', u'レム'), + (0x3356, 'M', u'レントゲン'), + (0x3357, 'M', u'ワット'), + (0x3358, 'M', u'0点'), + (0x3359, 'M', u'1点'), + (0x335A, 'M', u'2点'), + (0x335B, 'M', u'3点'), + (0x335C, 'M', u'4点'), + (0x335D, 'M', u'5点'), + (0x335E, 'M', u'6点'), + (0x335F, 'M', u'7点'), + (0x3360, 'M', u'8点'), + (0x3361, 'M', u'9点'), + (0x3362, 'M', u'10点'), + (0x3363, 'M', u'11点'), + (0x3364, 'M', u'12点'), + (0x3365, 'M', u'13点'), + (0x3366, 'M', u'14点'), + (0x3367, 'M', u'15点'), + (0x3368, 'M', u'16点'), + (0x3369, 'M', u'17点'), + (0x336A, 'M', u'18点'), + (0x336B, 'M', u'19点'), + (0x336C, 'M', u'20点'), + (0x336D, 'M', u'21点'), + (0x336E, 'M', u'22点'), + (0x336F, 'M', u'23点'), + (0x3370, 'M', u'24点'), + (0x3371, 'M', u'hpa'), + (0x3372, 'M', u'da'), + (0x3373, 'M', u'au'), + (0x3374, 'M', u'bar'), + (0x3375, 'M', u'ov'), + (0x3376, 'M', u'pc'), + (0x3377, 'M', u'dm'), + (0x3378, 'M', u'dm2'), + (0x3379, 'M', u'dm3'), + (0x337A, 'M', u'iu'), + (0x337B, 'M', u'平成'), + (0x337C, 'M', u'昭和'), + (0x337D, 'M', u'大正'), + (0x337E, 'M', u'明治'), + (0x337F, 'M', u'株式会社'), + (0x3380, 'M', u'pa'), + (0x3381, 'M', u'na'), + (0x3382, 'M', u'μa'), + (0x3383, 'M', u'ma'), + (0x3384, 'M', u'ka'), + (0x3385, 'M', u'kb'), + (0x3386, 'M', u'mb'), + (0x3387, 'M', u'gb'), + (0x3388, 'M', u'cal'), + (0x3389, 'M', u'kcal'), + (0x338A, 'M', u'pf'), + (0x338B, 'M', u'nf'), + (0x338C, 'M', u'μf'), + (0x338D, 'M', u'μg'), + (0x338E, 'M', u'mg'), + (0x338F, 'M', u'kg'), + (0x3390, 'M', u'hz'), + (0x3391, 'M', u'khz'), + (0x3392, 'M', u'mhz'), + (0x3393, 'M', u'ghz'), + (0x3394, 'M', u'thz'), + (0x3395, 'M', u'μl'), + (0x3396, 'M', u'ml'), + (0x3397, 'M', u'dl'), + (0x3398, 'M', u'kl'), + (0x3399, 'M', u'fm'), + (0x339A, 'M', u'nm'), + (0x339B, 'M', u'μm'), + ] + +def _seg_34(): + return [ + (0x339C, 'M', u'mm'), + (0x339D, 'M', u'cm'), + (0x339E, 'M', u'km'), + (0x339F, 'M', u'mm2'), + (0x33A0, 'M', u'cm2'), + (0x33A1, 'M', u'm2'), + (0x33A2, 'M', u'km2'), + (0x33A3, 'M', u'mm3'), + (0x33A4, 'M', u'cm3'), + (0x33A5, 'M', u'm3'), + (0x33A6, 'M', u'km3'), + (0x33A7, 'M', u'm∕s'), + (0x33A8, 'M', u'm∕s2'), + (0x33A9, 'M', u'pa'), + (0x33AA, 'M', u'kpa'), + (0x33AB, 'M', u'mpa'), + (0x33AC, 'M', u'gpa'), + (0x33AD, 'M', u'rad'), + (0x33AE, 'M', u'rad∕s'), + (0x33AF, 'M', u'rad∕s2'), + (0x33B0, 'M', u'ps'), + (0x33B1, 'M', u'ns'), + (0x33B2, 'M', u'μs'), + (0x33B3, 'M', u'ms'), + (0x33B4, 'M', u'pv'), + (0x33B5, 'M', u'nv'), + (0x33B6, 'M', u'μv'), + (0x33B7, 'M', u'mv'), + (0x33B8, 'M', u'kv'), + (0x33B9, 'M', u'mv'), + (0x33BA, 'M', u'pw'), + (0x33BB, 'M', u'nw'), + (0x33BC, 'M', u'μw'), + (0x33BD, 'M', u'mw'), + (0x33BE, 'M', u'kw'), + (0x33BF, 'M', u'mw'), + (0x33C0, 'M', u'kω'), + (0x33C1, 'M', u'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', u'bq'), + (0x33C4, 'M', u'cc'), + (0x33C5, 'M', u'cd'), + (0x33C6, 'M', u'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', u'db'), + (0x33C9, 'M', u'gy'), + (0x33CA, 'M', u'ha'), + (0x33CB, 'M', u'hp'), + (0x33CC, 'M', u'in'), + (0x33CD, 'M', u'kk'), + (0x33CE, 'M', u'km'), + (0x33CF, 'M', u'kt'), + (0x33D0, 'M', u'lm'), + (0x33D1, 'M', u'ln'), + (0x33D2, 'M', u'log'), + (0x33D3, 'M', u'lx'), + (0x33D4, 'M', u'mb'), + (0x33D5, 'M', u'mil'), + (0x33D6, 'M', u'mol'), + (0x33D7, 'M', u'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', u'ppm'), + (0x33DA, 'M', u'pr'), + (0x33DB, 'M', u'sr'), + (0x33DC, 'M', u'sv'), + (0x33DD, 'M', 
u'wb'), + (0x33DE, 'M', u'v∕m'), + (0x33DF, 'M', u'a∕m'), + (0x33E0, 'M', u'1日'), + (0x33E1, 'M', u'2日'), + (0x33E2, 'M', u'3日'), + (0x33E3, 'M', u'4日'), + (0x33E4, 'M', u'5日'), + (0x33E5, 'M', u'6日'), + (0x33E6, 'M', u'7日'), + (0x33E7, 'M', u'8日'), + (0x33E8, 'M', u'9日'), + (0x33E9, 'M', u'10日'), + (0x33EA, 'M', u'11日'), + (0x33EB, 'M', u'12日'), + (0x33EC, 'M', u'13日'), + (0x33ED, 'M', u'14日'), + (0x33EE, 'M', u'15日'), + (0x33EF, 'M', u'16日'), + (0x33F0, 'M', u'17日'), + (0x33F1, 'M', u'18日'), + (0x33F2, 'M', u'19日'), + (0x33F3, 'M', u'20日'), + (0x33F4, 'M', u'21日'), + (0x33F5, 'M', u'22日'), + (0x33F6, 'M', u'23日'), + (0x33F7, 'M', u'24日'), + (0x33F8, 'M', u'25日'), + (0x33F9, 'M', u'26日'), + (0x33FA, 'M', u'27日'), + (0x33FB, 'M', u'28日'), + (0x33FC, 'M', u'29日'), + (0x33FD, 'M', u'30日'), + (0x33FE, 'M', u'31日'), + (0x33FF, 'M', u'gal'), + ] + +def _seg_35(): + return [ + (0x3400, 'V'), + (0x4DB6, 'X'), + (0x4DC0, 'V'), + (0x9FCD, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', u'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', u'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', u'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', u'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', u'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', u'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', u'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', u'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', u'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', u'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', u'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', u'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', u'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', u'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', u'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', u'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', u'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', u'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', u'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', u'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', u'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', u'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', u'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', u'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', u'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', u'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', u'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', u'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', u'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', u'ꚍ'), + (0xA68D, 'V'), + (0xA68E, 'M', u'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', u'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', u'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', u'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', u'ꚗ'), + (0xA697, 'V'), + (0xA698, 'X'), + (0xA69F, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', u'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', u'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', u'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', u'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', u'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', u'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', u'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', u'ꜳ'), + (0xA733, 'V'), + ] + +def _seg_36(): + return [ + (0xA734, 'M', u'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', u'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', u'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', u'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', u'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', u'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', u'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', u'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', u'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', u'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', u'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', u'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', u'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', u'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', u'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', u'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', u'ꝕ'), + (0xA755, 'V'), + 
(0xA756, 'M', u'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', u'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', u'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', u'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', u'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', u'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', u'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', u'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', u'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', u'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', u'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', u'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', u'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', u'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', u'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', u'ꝼ'), + (0xA77C, 'V'), + (0xA77D, 'M', u'ᵹ'), + (0xA77E, 'M', u'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', u'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', u'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', u'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', u'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', u'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', u'ɥ'), + (0xA78E, 'V'), + (0xA78F, 'X'), + (0xA790, 'M', u'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', u'ꞓ'), + (0xA793, 'V'), + (0xA794, 'X'), + (0xA7A0, 'M', u'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', u'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', u'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', u'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', u'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', u'ɦ'), + (0xA7AB, 'X'), + (0xA7F8, 'M', u'ħ'), + ] + +def _seg_37(): + return [ + (0xA7F9, 'M', u'œ'), + (0xA7FA, 'V'), + (0xA82C, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C5, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA8FC, 'X'), + (0xA900, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9E0, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAA7C, 'X'), + (0xAA80, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', u'豈'), + (0xF901, 'M', u'更'), + (0xF902, 'M', u'車'), + (0xF903, 'M', u'賈'), + (0xF904, 'M', u'滑'), + (0xF905, 'M', u'串'), + (0xF906, 'M', u'句'), + (0xF907, 'M', u'龜'), + (0xF909, 'M', u'契'), + (0xF90A, 'M', u'金'), + (0xF90B, 'M', u'喇'), + (0xF90C, 'M', u'奈'), + (0xF90D, 'M', u'懶'), + (0xF90E, 'M', u'癩'), + (0xF90F, 'M', u'羅'), + (0xF910, 'M', u'蘿'), + (0xF911, 'M', u'螺'), + (0xF912, 'M', u'裸'), + (0xF913, 'M', u'邏'), + (0xF914, 'M', u'樂'), + (0xF915, 'M', u'洛'), + (0xF916, 'M', u'烙'), + (0xF917, 'M', u'珞'), + (0xF918, 'M', u'落'), + (0xF919, 'M', u'酪'), + (0xF91A, 'M', u'駱'), + (0xF91B, 'M', u'亂'), + (0xF91C, 'M', u'卵'), + (0xF91D, 'M', u'欄'), + (0xF91E, 'M', u'爛'), + (0xF91F, 'M', u'蘭'), + (0xF920, 'M', u'鸞'), + (0xF921, 'M', u'嵐'), + (0xF922, 'M', u'濫'), + (0xF923, 'M', u'藍'), + (0xF924, 'M', u'襤'), + (0xF925, 'M', u'拉'), + (0xF926, 'M', u'臘'), + (0xF927, 'M', u'蠟'), + (0xF928, 'M', u'廊'), + (0xF929, 'M', u'朗'), + (0xF92A, 'M', u'浪'), + (0xF92B, 'M', u'狼'), + (0xF92C, 'M', u'郎'), + (0xF92D, 'M', u'來'), + ] + +def _seg_38(): + return [ + (0xF92E, 'M', u'冷'), + (0xF92F, 'M', u'勞'), + (0xF930, 'M', u'擄'), + (0xF931, 'M', u'櫓'), + (0xF932, 'M', u'爐'), + (0xF933, 'M', u'盧'), + (0xF934, 'M', u'老'), + (0xF935, 'M', u'蘆'), + (0xF936, 
'M', u'虜'), + (0xF937, 'M', u'路'), + (0xF938, 'M', u'露'), + (0xF939, 'M', u'魯'), + (0xF93A, 'M', u'鷺'), + (0xF93B, 'M', u'碌'), + (0xF93C, 'M', u'祿'), + (0xF93D, 'M', u'綠'), + (0xF93E, 'M', u'菉'), + (0xF93F, 'M', u'錄'), + (0xF940, 'M', u'鹿'), + (0xF941, 'M', u'論'), + (0xF942, 'M', u'壟'), + (0xF943, 'M', u'弄'), + (0xF944, 'M', u'籠'), + (0xF945, 'M', u'聾'), + (0xF946, 'M', u'牢'), + (0xF947, 'M', u'磊'), + (0xF948, 'M', u'賂'), + (0xF949, 'M', u'雷'), + (0xF94A, 'M', u'壘'), + (0xF94B, 'M', u'屢'), + (0xF94C, 'M', u'樓'), + (0xF94D, 'M', u'淚'), + (0xF94E, 'M', u'漏'), + (0xF94F, 'M', u'累'), + (0xF950, 'M', u'縷'), + (0xF951, 'M', u'陋'), + (0xF952, 'M', u'勒'), + (0xF953, 'M', u'肋'), + (0xF954, 'M', u'凜'), + (0xF955, 'M', u'凌'), + (0xF956, 'M', u'稜'), + (0xF957, 'M', u'綾'), + (0xF958, 'M', u'菱'), + (0xF959, 'M', u'陵'), + (0xF95A, 'M', u'讀'), + (0xF95B, 'M', u'拏'), + (0xF95C, 'M', u'樂'), + (0xF95D, 'M', u'諾'), + (0xF95E, 'M', u'丹'), + (0xF95F, 'M', u'寧'), + (0xF960, 'M', u'怒'), + (0xF961, 'M', u'率'), + (0xF962, 'M', u'異'), + (0xF963, 'M', u'北'), + (0xF964, 'M', u'磻'), + (0xF965, 'M', u'便'), + (0xF966, 'M', u'復'), + (0xF967, 'M', u'不'), + (0xF968, 'M', u'泌'), + (0xF969, 'M', u'數'), + (0xF96A, 'M', u'索'), + (0xF96B, 'M', u'參'), + (0xF96C, 'M', u'塞'), + (0xF96D, 'M', u'省'), + (0xF96E, 'M', u'葉'), + (0xF96F, 'M', u'說'), + (0xF970, 'M', u'殺'), + (0xF971, 'M', u'辰'), + (0xF972, 'M', u'沈'), + (0xF973, 'M', u'拾'), + (0xF974, 'M', u'若'), + (0xF975, 'M', u'掠'), + (0xF976, 'M', u'略'), + (0xF977, 'M', u'亮'), + (0xF978, 'M', u'兩'), + (0xF979, 'M', u'凉'), + (0xF97A, 'M', u'梁'), + (0xF97B, 'M', u'糧'), + (0xF97C, 'M', u'良'), + (0xF97D, 'M', u'諒'), + (0xF97E, 'M', u'量'), + (0xF97F, 'M', u'勵'), + (0xF980, 'M', u'呂'), + (0xF981, 'M', u'女'), + (0xF982, 'M', u'廬'), + (0xF983, 'M', u'旅'), + (0xF984, 'M', u'濾'), + (0xF985, 'M', u'礪'), + (0xF986, 'M', u'閭'), + (0xF987, 'M', u'驪'), + (0xF988, 'M', u'麗'), + (0xF989, 'M', u'黎'), + (0xF98A, 'M', u'力'), + (0xF98B, 'M', u'曆'), + (0xF98C, 'M', u'歷'), + (0xF98D, 'M', u'轢'), + (0xF98E, 'M', u'年'), + (0xF98F, 'M', u'憐'), + (0xF990, 'M', u'戀'), + (0xF991, 'M', u'撚'), + ] + +def _seg_39(): + return [ + (0xF992, 'M', u'漣'), + (0xF993, 'M', u'煉'), + (0xF994, 'M', u'璉'), + (0xF995, 'M', u'秊'), + (0xF996, 'M', u'練'), + (0xF997, 'M', u'聯'), + (0xF998, 'M', u'輦'), + (0xF999, 'M', u'蓮'), + (0xF99A, 'M', u'連'), + (0xF99B, 'M', u'鍊'), + (0xF99C, 'M', u'列'), + (0xF99D, 'M', u'劣'), + (0xF99E, 'M', u'咽'), + (0xF99F, 'M', u'烈'), + (0xF9A0, 'M', u'裂'), + (0xF9A1, 'M', u'說'), + (0xF9A2, 'M', u'廉'), + (0xF9A3, 'M', u'念'), + (0xF9A4, 'M', u'捻'), + (0xF9A5, 'M', u'殮'), + (0xF9A6, 'M', u'簾'), + (0xF9A7, 'M', u'獵'), + (0xF9A8, 'M', u'令'), + (0xF9A9, 'M', u'囹'), + (0xF9AA, 'M', u'寧'), + (0xF9AB, 'M', u'嶺'), + (0xF9AC, 'M', u'怜'), + (0xF9AD, 'M', u'玲'), + (0xF9AE, 'M', u'瑩'), + (0xF9AF, 'M', u'羚'), + (0xF9B0, 'M', u'聆'), + (0xF9B1, 'M', u'鈴'), + (0xF9B2, 'M', u'零'), + (0xF9B3, 'M', u'靈'), + (0xF9B4, 'M', u'領'), + (0xF9B5, 'M', u'例'), + (0xF9B6, 'M', u'禮'), + (0xF9B7, 'M', u'醴'), + (0xF9B8, 'M', u'隸'), + (0xF9B9, 'M', u'惡'), + (0xF9BA, 'M', u'了'), + (0xF9BB, 'M', u'僚'), + (0xF9BC, 'M', u'寮'), + (0xF9BD, 'M', u'尿'), + (0xF9BE, 'M', u'料'), + (0xF9BF, 'M', u'樂'), + (0xF9C0, 'M', u'燎'), + (0xF9C1, 'M', u'療'), + (0xF9C2, 'M', u'蓼'), + (0xF9C3, 'M', u'遼'), + (0xF9C4, 'M', u'龍'), + (0xF9C5, 'M', u'暈'), + (0xF9C6, 'M', u'阮'), + (0xF9C7, 'M', u'劉'), + (0xF9C8, 'M', u'杻'), + (0xF9C9, 'M', u'柳'), + (0xF9CA, 'M', u'流'), + (0xF9CB, 'M', u'溜'), + (0xF9CC, 'M', u'琉'), + (0xF9CD, 'M', u'留'), + (0xF9CE, 'M', u'硫'), + (0xF9CF, 
'M', u'紐'), + (0xF9D0, 'M', u'類'), + (0xF9D1, 'M', u'六'), + (0xF9D2, 'M', u'戮'), + (0xF9D3, 'M', u'陸'), + (0xF9D4, 'M', u'倫'), + (0xF9D5, 'M', u'崙'), + (0xF9D6, 'M', u'淪'), + (0xF9D7, 'M', u'輪'), + (0xF9D8, 'M', u'律'), + (0xF9D9, 'M', u'慄'), + (0xF9DA, 'M', u'栗'), + (0xF9DB, 'M', u'率'), + (0xF9DC, 'M', u'隆'), + (0xF9DD, 'M', u'利'), + (0xF9DE, 'M', u'吏'), + (0xF9DF, 'M', u'履'), + (0xF9E0, 'M', u'易'), + (0xF9E1, 'M', u'李'), + (0xF9E2, 'M', u'梨'), + (0xF9E3, 'M', u'泥'), + (0xF9E4, 'M', u'理'), + (0xF9E5, 'M', u'痢'), + (0xF9E6, 'M', u'罹'), + (0xF9E7, 'M', u'裏'), + (0xF9E8, 'M', u'裡'), + (0xF9E9, 'M', u'里'), + (0xF9EA, 'M', u'離'), + (0xF9EB, 'M', u'匿'), + (0xF9EC, 'M', u'溺'), + (0xF9ED, 'M', u'吝'), + (0xF9EE, 'M', u'燐'), + (0xF9EF, 'M', u'璘'), + (0xF9F0, 'M', u'藺'), + (0xF9F1, 'M', u'隣'), + (0xF9F2, 'M', u'鱗'), + (0xF9F3, 'M', u'麟'), + (0xF9F4, 'M', u'林'), + (0xF9F5, 'M', u'淋'), + ] + +def _seg_40(): + return [ + (0xF9F6, 'M', u'臨'), + (0xF9F7, 'M', u'立'), + (0xF9F8, 'M', u'笠'), + (0xF9F9, 'M', u'粒'), + (0xF9FA, 'M', u'狀'), + (0xF9FB, 'M', u'炙'), + (0xF9FC, 'M', u'識'), + (0xF9FD, 'M', u'什'), + (0xF9FE, 'M', u'茶'), + (0xF9FF, 'M', u'刺'), + (0xFA00, 'M', u'切'), + (0xFA01, 'M', u'度'), + (0xFA02, 'M', u'拓'), + (0xFA03, 'M', u'糖'), + (0xFA04, 'M', u'宅'), + (0xFA05, 'M', u'洞'), + (0xFA06, 'M', u'暴'), + (0xFA07, 'M', u'輻'), + (0xFA08, 'M', u'行'), + (0xFA09, 'M', u'降'), + (0xFA0A, 'M', u'見'), + (0xFA0B, 'M', u'廓'), + (0xFA0C, 'M', u'兀'), + (0xFA0D, 'M', u'嗀'), + (0xFA0E, 'V'), + (0xFA10, 'M', u'塚'), + (0xFA11, 'V'), + (0xFA12, 'M', u'晴'), + (0xFA13, 'V'), + (0xFA15, 'M', u'凞'), + (0xFA16, 'M', u'猪'), + (0xFA17, 'M', u'益'), + (0xFA18, 'M', u'礼'), + (0xFA19, 'M', u'神'), + (0xFA1A, 'M', u'祥'), + (0xFA1B, 'M', u'福'), + (0xFA1C, 'M', u'靖'), + (0xFA1D, 'M', u'精'), + (0xFA1E, 'M', u'羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', u'蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', u'諸'), + (0xFA23, 'V'), + (0xFA25, 'M', u'逸'), + (0xFA26, 'M', u'都'), + (0xFA27, 'V'), + (0xFA2A, 'M', u'飯'), + (0xFA2B, 'M', u'飼'), + (0xFA2C, 'M', u'館'), + (0xFA2D, 'M', u'鶴'), + (0xFA2E, 'M', u'郞'), + (0xFA2F, 'M', u'隷'), + (0xFA30, 'M', u'侮'), + (0xFA31, 'M', u'僧'), + (0xFA32, 'M', u'免'), + (0xFA33, 'M', u'勉'), + (0xFA34, 'M', u'勤'), + (0xFA35, 'M', u'卑'), + (0xFA36, 'M', u'喝'), + (0xFA37, 'M', u'嘆'), + (0xFA38, 'M', u'器'), + (0xFA39, 'M', u'塀'), + (0xFA3A, 'M', u'墨'), + (0xFA3B, 'M', u'層'), + (0xFA3C, 'M', u'屮'), + (0xFA3D, 'M', u'悔'), + (0xFA3E, 'M', u'慨'), + (0xFA3F, 'M', u'憎'), + (0xFA40, 'M', u'懲'), + (0xFA41, 'M', u'敏'), + (0xFA42, 'M', u'既'), + (0xFA43, 'M', u'暑'), + (0xFA44, 'M', u'梅'), + (0xFA45, 'M', u'海'), + (0xFA46, 'M', u'渚'), + (0xFA47, 'M', u'漢'), + (0xFA48, 'M', u'煮'), + (0xFA49, 'M', u'爫'), + (0xFA4A, 'M', u'琢'), + (0xFA4B, 'M', u'碑'), + (0xFA4C, 'M', u'社'), + (0xFA4D, 'M', u'祉'), + (0xFA4E, 'M', u'祈'), + (0xFA4F, 'M', u'祐'), + (0xFA50, 'M', u'祖'), + (0xFA51, 'M', u'祝'), + (0xFA52, 'M', u'禍'), + (0xFA53, 'M', u'禎'), + (0xFA54, 'M', u'穀'), + (0xFA55, 'M', u'突'), + (0xFA56, 'M', u'節'), + (0xFA57, 'M', u'練'), + (0xFA58, 'M', u'縉'), + (0xFA59, 'M', u'繁'), + (0xFA5A, 'M', u'署'), + (0xFA5B, 'M', u'者'), + (0xFA5C, 'M', u'臭'), + (0xFA5D, 'M', u'艹'), + (0xFA5F, 'M', u'著'), + ] + +def _seg_41(): + return [ + (0xFA60, 'M', u'褐'), + (0xFA61, 'M', u'視'), + (0xFA62, 'M', u'謁'), + (0xFA63, 'M', u'謹'), + (0xFA64, 'M', u'賓'), + (0xFA65, 'M', u'贈'), + (0xFA66, 'M', u'辶'), + (0xFA67, 'M', u'逸'), + (0xFA68, 'M', u'難'), + (0xFA69, 'M', u'響'), + (0xFA6A, 'M', u'頻'), + (0xFA6B, 'M', u'恵'), + (0xFA6C, 'M', u'𤋮'), + (0xFA6D, 'M', u'舘'), + (0xFA6E, 'X'), + 
(0xFA70, 'M', u'並'), + (0xFA71, 'M', u'况'), + (0xFA72, 'M', u'全'), + (0xFA73, 'M', u'侀'), + (0xFA74, 'M', u'充'), + (0xFA75, 'M', u'冀'), + (0xFA76, 'M', u'勇'), + (0xFA77, 'M', u'勺'), + (0xFA78, 'M', u'喝'), + (0xFA79, 'M', u'啕'), + (0xFA7A, 'M', u'喙'), + (0xFA7B, 'M', u'嗢'), + (0xFA7C, 'M', u'塚'), + (0xFA7D, 'M', u'墳'), + (0xFA7E, 'M', u'奄'), + (0xFA7F, 'M', u'奔'), + (0xFA80, 'M', u'婢'), + (0xFA81, 'M', u'嬨'), + (0xFA82, 'M', u'廒'), + (0xFA83, 'M', u'廙'), + (0xFA84, 'M', u'彩'), + (0xFA85, 'M', u'徭'), + (0xFA86, 'M', u'惘'), + (0xFA87, 'M', u'慎'), + (0xFA88, 'M', u'愈'), + (0xFA89, 'M', u'憎'), + (0xFA8A, 'M', u'慠'), + (0xFA8B, 'M', u'懲'), + (0xFA8C, 'M', u'戴'), + (0xFA8D, 'M', u'揄'), + (0xFA8E, 'M', u'搜'), + (0xFA8F, 'M', u'摒'), + (0xFA90, 'M', u'敖'), + (0xFA91, 'M', u'晴'), + (0xFA92, 'M', u'朗'), + (0xFA93, 'M', u'望'), + (0xFA94, 'M', u'杖'), + (0xFA95, 'M', u'歹'), + (0xFA96, 'M', u'殺'), + (0xFA97, 'M', u'流'), + (0xFA98, 'M', u'滛'), + (0xFA99, 'M', u'滋'), + (0xFA9A, 'M', u'漢'), + (0xFA9B, 'M', u'瀞'), + (0xFA9C, 'M', u'煮'), + (0xFA9D, 'M', u'瞧'), + (0xFA9E, 'M', u'爵'), + (0xFA9F, 'M', u'犯'), + (0xFAA0, 'M', u'猪'), + (0xFAA1, 'M', u'瑱'), + (0xFAA2, 'M', u'甆'), + (0xFAA3, 'M', u'画'), + (0xFAA4, 'M', u'瘝'), + (0xFAA5, 'M', u'瘟'), + (0xFAA6, 'M', u'益'), + (0xFAA7, 'M', u'盛'), + (0xFAA8, 'M', u'直'), + (0xFAA9, 'M', u'睊'), + (0xFAAA, 'M', u'着'), + (0xFAAB, 'M', u'磌'), + (0xFAAC, 'M', u'窱'), + (0xFAAD, 'M', u'節'), + (0xFAAE, 'M', u'类'), + (0xFAAF, 'M', u'絛'), + (0xFAB0, 'M', u'練'), + (0xFAB1, 'M', u'缾'), + (0xFAB2, 'M', u'者'), + (0xFAB3, 'M', u'荒'), + (0xFAB4, 'M', u'華'), + (0xFAB5, 'M', u'蝹'), + (0xFAB6, 'M', u'襁'), + (0xFAB7, 'M', u'覆'), + (0xFAB8, 'M', u'視'), + (0xFAB9, 'M', u'調'), + (0xFABA, 'M', u'諸'), + (0xFABB, 'M', u'請'), + (0xFABC, 'M', u'謁'), + (0xFABD, 'M', u'諾'), + (0xFABE, 'M', u'諭'), + (0xFABF, 'M', u'謹'), + (0xFAC0, 'M', u'變'), + (0xFAC1, 'M', u'贈'), + (0xFAC2, 'M', u'輸'), + (0xFAC3, 'M', u'遲'), + (0xFAC4, 'M', u'醙'), + ] + +def _seg_42(): + return [ + (0xFAC5, 'M', u'鉶'), + (0xFAC6, 'M', u'陼'), + (0xFAC7, 'M', u'難'), + (0xFAC8, 'M', u'靖'), + (0xFAC9, 'M', u'韛'), + (0xFACA, 'M', u'響'), + (0xFACB, 'M', u'頋'), + (0xFACC, 'M', u'頻'), + (0xFACD, 'M', u'鬒'), + (0xFACE, 'M', u'龜'), + (0xFACF, 'M', u'𢡊'), + (0xFAD0, 'M', u'𢡄'), + (0xFAD1, 'M', u'𣏕'), + (0xFAD2, 'M', u'㮝'), + (0xFAD3, 'M', u'䀘'), + (0xFAD4, 'M', u'䀹'), + (0xFAD5, 'M', u'𥉉'), + (0xFAD6, 'M', u'𥳐'), + (0xFAD7, 'M', u'𧻓'), + (0xFAD8, 'M', u'齃'), + (0xFAD9, 'M', u'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', u'ff'), + (0xFB01, 'M', u'fi'), + (0xFB02, 'M', u'fl'), + (0xFB03, 'M', u'ffi'), + (0xFB04, 'M', u'ffl'), + (0xFB05, 'M', u'st'), + (0xFB07, 'X'), + (0xFB13, 'M', u'մն'), + (0xFB14, 'M', u'մե'), + (0xFB15, 'M', u'մի'), + (0xFB16, 'M', u'վն'), + (0xFB17, 'M', u'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', u'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', u'ײַ'), + (0xFB20, 'M', u'ע'), + (0xFB21, 'M', u'א'), + (0xFB22, 'M', u'ד'), + (0xFB23, 'M', u'ה'), + (0xFB24, 'M', u'כ'), + (0xFB25, 'M', u'ל'), + (0xFB26, 'M', u'ם'), + (0xFB27, 'M', u'ר'), + (0xFB28, 'M', u'ת'), + (0xFB29, '3', u'+'), + (0xFB2A, 'M', u'שׁ'), + (0xFB2B, 'M', u'שׂ'), + (0xFB2C, 'M', u'שּׁ'), + (0xFB2D, 'M', u'שּׂ'), + (0xFB2E, 'M', u'אַ'), + (0xFB2F, 'M', u'אָ'), + (0xFB30, 'M', u'אּ'), + (0xFB31, 'M', u'בּ'), + (0xFB32, 'M', u'גּ'), + (0xFB33, 'M', u'דּ'), + (0xFB34, 'M', u'הּ'), + (0xFB35, 'M', u'וּ'), + (0xFB36, 'M', u'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', u'טּ'), + (0xFB39, 'M', u'יּ'), + (0xFB3A, 'M', u'ךּ'), + (0xFB3B, 'M', u'כּ'), + (0xFB3C, 'M', u'לּ'), + (0xFB3D, 'X'), + 
(0xFB3E, 'M', u'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', u'נּ'), + (0xFB41, 'M', u'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', u'ףּ'), + (0xFB44, 'M', u'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', u'צּ'), + (0xFB47, 'M', u'קּ'), + (0xFB48, 'M', u'רּ'), + (0xFB49, 'M', u'שּ'), + (0xFB4A, 'M', u'תּ'), + (0xFB4B, 'M', u'וֹ'), + (0xFB4C, 'M', u'בֿ'), + (0xFB4D, 'M', u'כֿ'), + (0xFB4E, 'M', u'פֿ'), + (0xFB4F, 'M', u'אל'), + (0xFB50, 'M', u'ٱ'), + (0xFB52, 'M', u'ٻ'), + (0xFB56, 'M', u'پ'), + (0xFB5A, 'M', u'ڀ'), + (0xFB5E, 'M', u'ٺ'), + (0xFB62, 'M', u'ٿ'), + (0xFB66, 'M', u'ٹ'), + (0xFB6A, 'M', u'ڤ'), + (0xFB6E, 'M', u'ڦ'), + (0xFB72, 'M', u'ڄ'), + (0xFB76, 'M', u'ڃ'), + (0xFB7A, 'M', u'چ'), + (0xFB7E, 'M', u'ڇ'), + (0xFB82, 'M', u'ڍ'), + ] + +def _seg_43(): + return [ + (0xFB84, 'M', u'ڌ'), + (0xFB86, 'M', u'ڎ'), + (0xFB88, 'M', u'ڈ'), + (0xFB8A, 'M', u'ژ'), + (0xFB8C, 'M', u'ڑ'), + (0xFB8E, 'M', u'ک'), + (0xFB92, 'M', u'گ'), + (0xFB96, 'M', u'ڳ'), + (0xFB9A, 'M', u'ڱ'), + (0xFB9E, 'M', u'ں'), + (0xFBA0, 'M', u'ڻ'), + (0xFBA4, 'M', u'ۀ'), + (0xFBA6, 'M', u'ہ'), + (0xFBAA, 'M', u'ھ'), + (0xFBAE, 'M', u'ے'), + (0xFBB0, 'M', u'ۓ'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', u'ڭ'), + (0xFBD7, 'M', u'ۇ'), + (0xFBD9, 'M', u'ۆ'), + (0xFBDB, 'M', u'ۈ'), + (0xFBDD, 'M', u'ۇٴ'), + (0xFBDE, 'M', u'ۋ'), + (0xFBE0, 'M', u'ۅ'), + (0xFBE2, 'M', u'ۉ'), + (0xFBE4, 'M', u'ې'), + (0xFBE8, 'M', u'ى'), + (0xFBEA, 'M', u'ئا'), + (0xFBEC, 'M', u'ئە'), + (0xFBEE, 'M', u'ئو'), + (0xFBF0, 'M', u'ئۇ'), + (0xFBF2, 'M', u'ئۆ'), + (0xFBF4, 'M', u'ئۈ'), + (0xFBF6, 'M', u'ئې'), + (0xFBF9, 'M', u'ئى'), + (0xFBFC, 'M', u'ی'), + (0xFC00, 'M', u'ئج'), + (0xFC01, 'M', u'ئح'), + (0xFC02, 'M', u'ئم'), + (0xFC03, 'M', u'ئى'), + (0xFC04, 'M', u'ئي'), + (0xFC05, 'M', u'بج'), + (0xFC06, 'M', u'بح'), + (0xFC07, 'M', u'بخ'), + (0xFC08, 'M', u'بم'), + (0xFC09, 'M', u'بى'), + (0xFC0A, 'M', u'بي'), + (0xFC0B, 'M', u'تج'), + (0xFC0C, 'M', u'تح'), + (0xFC0D, 'M', u'تخ'), + (0xFC0E, 'M', u'تم'), + (0xFC0F, 'M', u'تى'), + (0xFC10, 'M', u'تي'), + (0xFC11, 'M', u'ثج'), + (0xFC12, 'M', u'ثم'), + (0xFC13, 'M', u'ثى'), + (0xFC14, 'M', u'ثي'), + (0xFC15, 'M', u'جح'), + (0xFC16, 'M', u'جم'), + (0xFC17, 'M', u'حج'), + (0xFC18, 'M', u'حم'), + (0xFC19, 'M', u'خج'), + (0xFC1A, 'M', u'خح'), + (0xFC1B, 'M', u'خم'), + (0xFC1C, 'M', u'سج'), + (0xFC1D, 'M', u'سح'), + (0xFC1E, 'M', u'سخ'), + (0xFC1F, 'M', u'سم'), + (0xFC20, 'M', u'صح'), + (0xFC21, 'M', u'صم'), + (0xFC22, 'M', u'ضج'), + (0xFC23, 'M', u'ضح'), + (0xFC24, 'M', u'ضخ'), + (0xFC25, 'M', u'ضم'), + (0xFC26, 'M', u'طح'), + (0xFC27, 'M', u'طم'), + (0xFC28, 'M', u'ظم'), + (0xFC29, 'M', u'عج'), + (0xFC2A, 'M', u'عم'), + (0xFC2B, 'M', u'غج'), + (0xFC2C, 'M', u'غم'), + (0xFC2D, 'M', u'فج'), + (0xFC2E, 'M', u'فح'), + (0xFC2F, 'M', u'فخ'), + (0xFC30, 'M', u'فم'), + (0xFC31, 'M', u'فى'), + (0xFC32, 'M', u'في'), + (0xFC33, 'M', u'قح'), + (0xFC34, 'M', u'قم'), + (0xFC35, 'M', u'قى'), + (0xFC36, 'M', u'قي'), + (0xFC37, 'M', u'كا'), + (0xFC38, 'M', u'كج'), + (0xFC39, 'M', u'كح'), + (0xFC3A, 'M', u'كخ'), + (0xFC3B, 'M', u'كل'), + (0xFC3C, 'M', u'كم'), + (0xFC3D, 'M', u'كى'), + (0xFC3E, 'M', u'كي'), + ] + +def _seg_44(): + return [ + (0xFC3F, 'M', u'لج'), + (0xFC40, 'M', u'لح'), + (0xFC41, 'M', u'لخ'), + (0xFC42, 'M', u'لم'), + (0xFC43, 'M', u'لى'), + (0xFC44, 'M', u'لي'), + (0xFC45, 'M', u'مج'), + (0xFC46, 'M', u'مح'), + (0xFC47, 'M', u'مخ'), + (0xFC48, 'M', u'مم'), + (0xFC49, 'M', u'مى'), + (0xFC4A, 'M', u'مي'), + (0xFC4B, 'M', u'نج'), + (0xFC4C, 'M', u'نح'), + (0xFC4D, 'M', u'نخ'), + (0xFC4E, 'M', u'نم'), + (0xFC4F, 
'M', u'نى'), + (0xFC50, 'M', u'ني'), + (0xFC51, 'M', u'هج'), + (0xFC52, 'M', u'هم'), + (0xFC53, 'M', u'هى'), + (0xFC54, 'M', u'هي'), + (0xFC55, 'M', u'يج'), + (0xFC56, 'M', u'يح'), + (0xFC57, 'M', u'يخ'), + (0xFC58, 'M', u'يم'), + (0xFC59, 'M', u'يى'), + (0xFC5A, 'M', u'يي'), + (0xFC5B, 'M', u'ذٰ'), + (0xFC5C, 'M', u'رٰ'), + (0xFC5D, 'M', u'ىٰ'), + (0xFC5E, '3', u' ٌّ'), + (0xFC5F, '3', u' ٍّ'), + (0xFC60, '3', u' َّ'), + (0xFC61, '3', u' ُّ'), + (0xFC62, '3', u' ِّ'), + (0xFC63, '3', u' ّٰ'), + (0xFC64, 'M', u'ئر'), + (0xFC65, 'M', u'ئز'), + (0xFC66, 'M', u'ئم'), + (0xFC67, 'M', u'ئن'), + (0xFC68, 'M', u'ئى'), + (0xFC69, 'M', u'ئي'), + (0xFC6A, 'M', u'بر'), + (0xFC6B, 'M', u'بز'), + (0xFC6C, 'M', u'بم'), + (0xFC6D, 'M', u'بن'), + (0xFC6E, 'M', u'بى'), + (0xFC6F, 'M', u'بي'), + (0xFC70, 'M', u'تر'), + (0xFC71, 'M', u'تز'), + (0xFC72, 'M', u'تم'), + (0xFC73, 'M', u'تن'), + (0xFC74, 'M', u'تى'), + (0xFC75, 'M', u'تي'), + (0xFC76, 'M', u'ثر'), + (0xFC77, 'M', u'ثز'), + (0xFC78, 'M', u'ثم'), + (0xFC79, 'M', u'ثن'), + (0xFC7A, 'M', u'ثى'), + (0xFC7B, 'M', u'ثي'), + (0xFC7C, 'M', u'فى'), + (0xFC7D, 'M', u'في'), + (0xFC7E, 'M', u'قى'), + (0xFC7F, 'M', u'قي'), + (0xFC80, 'M', u'كا'), + (0xFC81, 'M', u'كل'), + (0xFC82, 'M', u'كم'), + (0xFC83, 'M', u'كى'), + (0xFC84, 'M', u'كي'), + (0xFC85, 'M', u'لم'), + (0xFC86, 'M', u'لى'), + (0xFC87, 'M', u'لي'), + (0xFC88, 'M', u'ما'), + (0xFC89, 'M', u'مم'), + (0xFC8A, 'M', u'نر'), + (0xFC8B, 'M', u'نز'), + (0xFC8C, 'M', u'نم'), + (0xFC8D, 'M', u'نن'), + (0xFC8E, 'M', u'نى'), + (0xFC8F, 'M', u'ني'), + (0xFC90, 'M', u'ىٰ'), + (0xFC91, 'M', u'ير'), + (0xFC92, 'M', u'يز'), + (0xFC93, 'M', u'يم'), + (0xFC94, 'M', u'ين'), + (0xFC95, 'M', u'يى'), + (0xFC96, 'M', u'يي'), + (0xFC97, 'M', u'ئج'), + (0xFC98, 'M', u'ئح'), + (0xFC99, 'M', u'ئخ'), + (0xFC9A, 'M', u'ئم'), + (0xFC9B, 'M', u'ئه'), + (0xFC9C, 'M', u'بج'), + (0xFC9D, 'M', u'بح'), + (0xFC9E, 'M', u'بخ'), + (0xFC9F, 'M', u'بم'), + (0xFCA0, 'M', u'به'), + (0xFCA1, 'M', u'تج'), + (0xFCA2, 'M', u'تح'), + ] + +def _seg_45(): + return [ + (0xFCA3, 'M', u'تخ'), + (0xFCA4, 'M', u'تم'), + (0xFCA5, 'M', u'ته'), + (0xFCA6, 'M', u'ثم'), + (0xFCA7, 'M', u'جح'), + (0xFCA8, 'M', u'جم'), + (0xFCA9, 'M', u'حج'), + (0xFCAA, 'M', u'حم'), + (0xFCAB, 'M', u'خج'), + (0xFCAC, 'M', u'خم'), + (0xFCAD, 'M', u'سج'), + (0xFCAE, 'M', u'سح'), + (0xFCAF, 'M', u'سخ'), + (0xFCB0, 'M', u'سم'), + (0xFCB1, 'M', u'صح'), + (0xFCB2, 'M', u'صخ'), + (0xFCB3, 'M', u'صم'), + (0xFCB4, 'M', u'ضج'), + (0xFCB5, 'M', u'ضح'), + (0xFCB6, 'M', u'ضخ'), + (0xFCB7, 'M', u'ضم'), + (0xFCB8, 'M', u'طح'), + (0xFCB9, 'M', u'ظم'), + (0xFCBA, 'M', u'عج'), + (0xFCBB, 'M', u'عم'), + (0xFCBC, 'M', u'غج'), + (0xFCBD, 'M', u'غم'), + (0xFCBE, 'M', u'فج'), + (0xFCBF, 'M', u'فح'), + (0xFCC0, 'M', u'فخ'), + (0xFCC1, 'M', u'فم'), + (0xFCC2, 'M', u'قح'), + (0xFCC3, 'M', u'قم'), + (0xFCC4, 'M', u'كج'), + (0xFCC5, 'M', u'كح'), + (0xFCC6, 'M', u'كخ'), + (0xFCC7, 'M', u'كل'), + (0xFCC8, 'M', u'كم'), + (0xFCC9, 'M', u'لج'), + (0xFCCA, 'M', u'لح'), + (0xFCCB, 'M', u'لخ'), + (0xFCCC, 'M', u'لم'), + (0xFCCD, 'M', u'له'), + (0xFCCE, 'M', u'مج'), + (0xFCCF, 'M', u'مح'), + (0xFCD0, 'M', u'مخ'), + (0xFCD1, 'M', u'مم'), + (0xFCD2, 'M', u'نج'), + (0xFCD3, 'M', u'نح'), + (0xFCD4, 'M', u'نخ'), + (0xFCD5, 'M', u'نم'), + (0xFCD6, 'M', u'نه'), + (0xFCD7, 'M', u'هج'), + (0xFCD8, 'M', u'هم'), + (0xFCD9, 'M', u'هٰ'), + (0xFCDA, 'M', u'يج'), + (0xFCDB, 'M', u'يح'), + (0xFCDC, 'M', u'يخ'), + (0xFCDD, 'M', u'يم'), + (0xFCDE, 'M', u'يه'), + (0xFCDF, 'M', u'ئم'), + (0xFCE0, 'M', u'ئه'), + (0xFCE1, 'M', 
u'بم'), + (0xFCE2, 'M', u'به'), + (0xFCE3, 'M', u'تم'), + (0xFCE4, 'M', u'ته'), + (0xFCE5, 'M', u'ثم'), + (0xFCE6, 'M', u'ثه'), + (0xFCE7, 'M', u'سم'), + (0xFCE8, 'M', u'سه'), + (0xFCE9, 'M', u'شم'), + (0xFCEA, 'M', u'شه'), + (0xFCEB, 'M', u'كل'), + (0xFCEC, 'M', u'كم'), + (0xFCED, 'M', u'لم'), + (0xFCEE, 'M', u'نم'), + (0xFCEF, 'M', u'نه'), + (0xFCF0, 'M', u'يم'), + (0xFCF1, 'M', u'يه'), + (0xFCF2, 'M', u'ـَّ'), + (0xFCF3, 'M', u'ـُّ'), + (0xFCF4, 'M', u'ـِّ'), + (0xFCF5, 'M', u'طى'), + (0xFCF6, 'M', u'طي'), + (0xFCF7, 'M', u'عى'), + (0xFCF8, 'M', u'عي'), + (0xFCF9, 'M', u'غى'), + (0xFCFA, 'M', u'غي'), + (0xFCFB, 'M', u'سى'), + (0xFCFC, 'M', u'سي'), + (0xFCFD, 'M', u'شى'), + (0xFCFE, 'M', u'شي'), + (0xFCFF, 'M', u'حى'), + (0xFD00, 'M', u'حي'), + (0xFD01, 'M', u'جى'), + (0xFD02, 'M', u'جي'), + (0xFD03, 'M', u'خى'), + (0xFD04, 'M', u'خي'), + (0xFD05, 'M', u'صى'), + (0xFD06, 'M', u'صي'), + ] + +def _seg_46(): + return [ + (0xFD07, 'M', u'ضى'), + (0xFD08, 'M', u'ضي'), + (0xFD09, 'M', u'شج'), + (0xFD0A, 'M', u'شح'), + (0xFD0B, 'M', u'شخ'), + (0xFD0C, 'M', u'شم'), + (0xFD0D, 'M', u'شر'), + (0xFD0E, 'M', u'سر'), + (0xFD0F, 'M', u'صر'), + (0xFD10, 'M', u'ضر'), + (0xFD11, 'M', u'طى'), + (0xFD12, 'M', u'طي'), + (0xFD13, 'M', u'عى'), + (0xFD14, 'M', u'عي'), + (0xFD15, 'M', u'غى'), + (0xFD16, 'M', u'غي'), + (0xFD17, 'M', u'سى'), + (0xFD18, 'M', u'سي'), + (0xFD19, 'M', u'شى'), + (0xFD1A, 'M', u'شي'), + (0xFD1B, 'M', u'حى'), + (0xFD1C, 'M', u'حي'), + (0xFD1D, 'M', u'جى'), + (0xFD1E, 'M', u'جي'), + (0xFD1F, 'M', u'خى'), + (0xFD20, 'M', u'خي'), + (0xFD21, 'M', u'صى'), + (0xFD22, 'M', u'صي'), + (0xFD23, 'M', u'ضى'), + (0xFD24, 'M', u'ضي'), + (0xFD25, 'M', u'شج'), + (0xFD26, 'M', u'شح'), + (0xFD27, 'M', u'شخ'), + (0xFD28, 'M', u'شم'), + (0xFD29, 'M', u'شر'), + (0xFD2A, 'M', u'سر'), + (0xFD2B, 'M', u'صر'), + (0xFD2C, 'M', u'ضر'), + (0xFD2D, 'M', u'شج'), + (0xFD2E, 'M', u'شح'), + (0xFD2F, 'M', u'شخ'), + (0xFD30, 'M', u'شم'), + (0xFD31, 'M', u'سه'), + (0xFD32, 'M', u'شه'), + (0xFD33, 'M', u'طم'), + (0xFD34, 'M', u'سج'), + (0xFD35, 'M', u'سح'), + (0xFD36, 'M', u'سخ'), + (0xFD37, 'M', u'شج'), + (0xFD38, 'M', u'شح'), + (0xFD39, 'M', u'شخ'), + (0xFD3A, 'M', u'طم'), + (0xFD3B, 'M', u'ظم'), + (0xFD3C, 'M', u'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', u'تجم'), + (0xFD51, 'M', u'تحج'), + (0xFD53, 'M', u'تحم'), + (0xFD54, 'M', u'تخم'), + (0xFD55, 'M', u'تمج'), + (0xFD56, 'M', u'تمح'), + (0xFD57, 'M', u'تمخ'), + (0xFD58, 'M', u'جمح'), + (0xFD5A, 'M', u'حمي'), + (0xFD5B, 'M', u'حمى'), + (0xFD5C, 'M', u'سحج'), + (0xFD5D, 'M', u'سجح'), + (0xFD5E, 'M', u'سجى'), + (0xFD5F, 'M', u'سمح'), + (0xFD61, 'M', u'سمج'), + (0xFD62, 'M', u'سمم'), + (0xFD64, 'M', u'صحح'), + (0xFD66, 'M', u'صمم'), + (0xFD67, 'M', u'شحم'), + (0xFD69, 'M', u'شجي'), + (0xFD6A, 'M', u'شمخ'), + (0xFD6C, 'M', u'شمم'), + (0xFD6E, 'M', u'ضحى'), + (0xFD6F, 'M', u'ضخم'), + (0xFD71, 'M', u'طمح'), + (0xFD73, 'M', u'طمم'), + (0xFD74, 'M', u'طمي'), + (0xFD75, 'M', u'عجم'), + (0xFD76, 'M', u'عمم'), + (0xFD78, 'M', u'عمى'), + (0xFD79, 'M', u'غمم'), + (0xFD7A, 'M', u'غمي'), + (0xFD7B, 'M', u'غمى'), + (0xFD7C, 'M', u'فخم'), + (0xFD7E, 'M', u'قمح'), + (0xFD7F, 'M', u'قمم'), + (0xFD80, 'M', u'لحم'), + (0xFD81, 'M', u'لحي'), + (0xFD82, 'M', u'لحى'), + (0xFD83, 'M', u'لجج'), + (0xFD85, 'M', u'لخم'), + (0xFD87, 'M', u'لمح'), + (0xFD89, 'M', u'محج'), + (0xFD8A, 'M', u'محم'), + ] + +def _seg_47(): + return [ + (0xFD8B, 'M', u'محي'), + (0xFD8C, 'M', u'مجح'), + (0xFD8D, 'M', u'مجم'), + (0xFD8E, 'M', u'مخج'), + (0xFD8F, 'M', u'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 
u'مجخ'), + (0xFD93, 'M', u'همج'), + (0xFD94, 'M', u'همم'), + (0xFD95, 'M', u'نحم'), + (0xFD96, 'M', u'نحى'), + (0xFD97, 'M', u'نجم'), + (0xFD99, 'M', u'نجى'), + (0xFD9A, 'M', u'نمي'), + (0xFD9B, 'M', u'نمى'), + (0xFD9C, 'M', u'يمم'), + (0xFD9E, 'M', u'بخي'), + (0xFD9F, 'M', u'تجي'), + (0xFDA0, 'M', u'تجى'), + (0xFDA1, 'M', u'تخي'), + (0xFDA2, 'M', u'تخى'), + (0xFDA3, 'M', u'تمي'), + (0xFDA4, 'M', u'تمى'), + (0xFDA5, 'M', u'جمي'), + (0xFDA6, 'M', u'جحى'), + (0xFDA7, 'M', u'جمى'), + (0xFDA8, 'M', u'سخى'), + (0xFDA9, 'M', u'صحي'), + (0xFDAA, 'M', u'شحي'), + (0xFDAB, 'M', u'ضحي'), + (0xFDAC, 'M', u'لجي'), + (0xFDAD, 'M', u'لمي'), + (0xFDAE, 'M', u'يحي'), + (0xFDAF, 'M', u'يجي'), + (0xFDB0, 'M', u'يمي'), + (0xFDB1, 'M', u'ممي'), + (0xFDB2, 'M', u'قمي'), + (0xFDB3, 'M', u'نحي'), + (0xFDB4, 'M', u'قمح'), + (0xFDB5, 'M', u'لحم'), + (0xFDB6, 'M', u'عمي'), + (0xFDB7, 'M', u'كمي'), + (0xFDB8, 'M', u'نجح'), + (0xFDB9, 'M', u'مخي'), + (0xFDBA, 'M', u'لجم'), + (0xFDBB, 'M', u'كمم'), + (0xFDBC, 'M', u'لجم'), + (0xFDBD, 'M', u'نجح'), + (0xFDBE, 'M', u'جحي'), + (0xFDBF, 'M', u'حجي'), + (0xFDC0, 'M', u'مجي'), + (0xFDC1, 'M', u'فمي'), + (0xFDC2, 'M', u'بحي'), + (0xFDC3, 'M', u'كمم'), + (0xFDC4, 'M', u'عجم'), + (0xFDC5, 'M', u'صمم'), + (0xFDC6, 'M', u'سخي'), + (0xFDC7, 'M', u'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', u'صلے'), + (0xFDF1, 'M', u'قلے'), + (0xFDF2, 'M', u'الله'), + (0xFDF3, 'M', u'اكبر'), + (0xFDF4, 'M', u'محمد'), + (0xFDF5, 'M', u'صلعم'), + (0xFDF6, 'M', u'رسول'), + (0xFDF7, 'M', u'عليه'), + (0xFDF8, 'M', u'وسلم'), + (0xFDF9, 'M', u'صلى'), + (0xFDFA, '3', u'صلى الله عليه وسلم'), + (0xFDFB, '3', u'جل جلاله'), + (0xFDFC, 'M', u'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', u','), + (0xFE11, 'M', u'、'), + (0xFE12, 'X'), + (0xFE13, '3', u':'), + (0xFE14, '3', u';'), + (0xFE15, '3', u'!'), + (0xFE16, '3', u'?'), + (0xFE17, 'M', u'〖'), + (0xFE18, 'M', u'〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE27, 'X'), + (0xFE31, 'M', u'—'), + (0xFE32, 'M', u'–'), + (0xFE33, '3', u'_'), + (0xFE35, '3', u'('), + (0xFE36, '3', u')'), + (0xFE37, '3', u'{'), + (0xFE38, '3', u'}'), + (0xFE39, 'M', u'〔'), + (0xFE3A, 'M', u'〕'), + (0xFE3B, 'M', u'【'), + (0xFE3C, 'M', u'】'), + (0xFE3D, 'M', u'《'), + (0xFE3E, 'M', u'》'), + ] + +def _seg_48(): + return [ + (0xFE3F, 'M', u'〈'), + (0xFE40, 'M', u'〉'), + (0xFE41, 'M', u'「'), + (0xFE42, 'M', u'」'), + (0xFE43, 'M', u'『'), + (0xFE44, 'M', u'』'), + (0xFE45, 'V'), + (0xFE47, '3', u'['), + (0xFE48, '3', u']'), + (0xFE49, '3', u' ̅'), + (0xFE4D, '3', u'_'), + (0xFE50, '3', u','), + (0xFE51, 'M', u'、'), + (0xFE52, 'X'), + (0xFE54, '3', u';'), + (0xFE55, '3', u':'), + (0xFE56, '3', u'?'), + (0xFE57, '3', u'!'), + (0xFE58, 'M', u'—'), + (0xFE59, '3', u'('), + (0xFE5A, '3', u')'), + (0xFE5B, '3', u'{'), + (0xFE5C, '3', u'}'), + (0xFE5D, 'M', u'〔'), + (0xFE5E, 'M', u'〕'), + (0xFE5F, '3', u'#'), + (0xFE60, '3', u'&'), + (0xFE61, '3', u'*'), + (0xFE62, '3', u'+'), + (0xFE63, 'M', u'-'), + (0xFE64, '3', u'<'), + (0xFE65, '3', u'>'), + (0xFE66, '3', u'='), + (0xFE67, 'X'), + (0xFE68, '3', u'\\'), + (0xFE69, '3', u'$'), + (0xFE6A, '3', u'%'), + (0xFE6B, '3', u'@'), + (0xFE6C, 'X'), + (0xFE70, '3', u' ً'), + (0xFE71, 'M', u'ـً'), + (0xFE72, '3', u' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', u' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', u' َ'), + (0xFE77, 'M', u'ـَ'), + (0xFE78, '3', u' ُ'), + (0xFE79, 'M', u'ـُ'), + (0xFE7A, '3', u' ِ'), + (0xFE7B, 'M', u'ـِ'), + (0xFE7C, '3', u' ّ'), + (0xFE7D, 'M', u'ـّ'), + (0xFE7E, '3', u' ْ'), + (0xFE7F, 'M', u'ـْ'), + (0xFE80, 'M', 
u'ء'), + (0xFE81, 'M', u'آ'), + (0xFE83, 'M', u'أ'), + (0xFE85, 'M', u'ؤ'), + (0xFE87, 'M', u'إ'), + (0xFE89, 'M', u'ئ'), + (0xFE8D, 'M', u'ا'), + (0xFE8F, 'M', u'ب'), + (0xFE93, 'M', u'ة'), + (0xFE95, 'M', u'ت'), + (0xFE99, 'M', u'ث'), + (0xFE9D, 'M', u'ج'), + (0xFEA1, 'M', u'ح'), + (0xFEA5, 'M', u'خ'), + (0xFEA9, 'M', u'د'), + (0xFEAB, 'M', u'ذ'), + (0xFEAD, 'M', u'ر'), + (0xFEAF, 'M', u'ز'), + (0xFEB1, 'M', u'س'), + (0xFEB5, 'M', u'ش'), + (0xFEB9, 'M', u'ص'), + (0xFEBD, 'M', u'ض'), + (0xFEC1, 'M', u'ط'), + (0xFEC5, 'M', u'ظ'), + (0xFEC9, 'M', u'ع'), + (0xFECD, 'M', u'غ'), + (0xFED1, 'M', u'ف'), + (0xFED5, 'M', u'ق'), + (0xFED9, 'M', u'ك'), + (0xFEDD, 'M', u'ل'), + (0xFEE1, 'M', u'م'), + (0xFEE5, 'M', u'ن'), + (0xFEE9, 'M', u'ه'), + (0xFEED, 'M', u'و'), + (0xFEEF, 'M', u'ى'), + (0xFEF1, 'M', u'ي'), + (0xFEF5, 'M', u'لآ'), + (0xFEF7, 'M', u'لأ'), + (0xFEF9, 'M', u'لإ'), + (0xFEFB, 'M', u'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', u'!'), + (0xFF02, '3', u'"'), + ] + +def _seg_49(): + return [ + (0xFF03, '3', u'#'), + (0xFF04, '3', u'$'), + (0xFF05, '3', u'%'), + (0xFF06, '3', u'&'), + (0xFF07, '3', u'\''), + (0xFF08, '3', u'('), + (0xFF09, '3', u')'), + (0xFF0A, '3', u'*'), + (0xFF0B, '3', u'+'), + (0xFF0C, '3', u','), + (0xFF0D, 'M', u'-'), + (0xFF0E, 'M', u'.'), + (0xFF0F, '3', u'/'), + (0xFF10, 'M', u'0'), + (0xFF11, 'M', u'1'), + (0xFF12, 'M', u'2'), + (0xFF13, 'M', u'3'), + (0xFF14, 'M', u'4'), + (0xFF15, 'M', u'5'), + (0xFF16, 'M', u'6'), + (0xFF17, 'M', u'7'), + (0xFF18, 'M', u'8'), + (0xFF19, 'M', u'9'), + (0xFF1A, '3', u':'), + (0xFF1B, '3', u';'), + (0xFF1C, '3', u'<'), + (0xFF1D, '3', u'='), + (0xFF1E, '3', u'>'), + (0xFF1F, '3', u'?'), + (0xFF20, '3', u'@'), + (0xFF21, 'M', u'a'), + (0xFF22, 'M', u'b'), + (0xFF23, 'M', u'c'), + (0xFF24, 'M', u'd'), + (0xFF25, 'M', u'e'), + (0xFF26, 'M', u'f'), + (0xFF27, 'M', u'g'), + (0xFF28, 'M', u'h'), + (0xFF29, 'M', u'i'), + (0xFF2A, 'M', u'j'), + (0xFF2B, 'M', u'k'), + (0xFF2C, 'M', u'l'), + (0xFF2D, 'M', u'm'), + (0xFF2E, 'M', u'n'), + (0xFF2F, 'M', u'o'), + (0xFF30, 'M', u'p'), + (0xFF31, 'M', u'q'), + (0xFF32, 'M', u'r'), + (0xFF33, 'M', u's'), + (0xFF34, 'M', u't'), + (0xFF35, 'M', u'u'), + (0xFF36, 'M', u'v'), + (0xFF37, 'M', u'w'), + (0xFF38, 'M', u'x'), + (0xFF39, 'M', u'y'), + (0xFF3A, 'M', u'z'), + (0xFF3B, '3', u'['), + (0xFF3C, '3', u'\\'), + (0xFF3D, '3', u']'), + (0xFF3E, '3', u'^'), + (0xFF3F, '3', u'_'), + (0xFF40, '3', u'`'), + (0xFF41, 'M', u'a'), + (0xFF42, 'M', u'b'), + (0xFF43, 'M', u'c'), + (0xFF44, 'M', u'd'), + (0xFF45, 'M', u'e'), + (0xFF46, 'M', u'f'), + (0xFF47, 'M', u'g'), + (0xFF48, 'M', u'h'), + (0xFF49, 'M', u'i'), + (0xFF4A, 'M', u'j'), + (0xFF4B, 'M', u'k'), + (0xFF4C, 'M', u'l'), + (0xFF4D, 'M', u'm'), + (0xFF4E, 'M', u'n'), + (0xFF4F, 'M', u'o'), + (0xFF50, 'M', u'p'), + (0xFF51, 'M', u'q'), + (0xFF52, 'M', u'r'), + (0xFF53, 'M', u's'), + (0xFF54, 'M', u't'), + (0xFF55, 'M', u'u'), + (0xFF56, 'M', u'v'), + (0xFF57, 'M', u'w'), + (0xFF58, 'M', u'x'), + (0xFF59, 'M', u'y'), + (0xFF5A, 'M', u'z'), + (0xFF5B, '3', u'{'), + (0xFF5C, '3', u'|'), + (0xFF5D, '3', u'}'), + (0xFF5E, '3', u'~'), + (0xFF5F, 'M', u'⦅'), + (0xFF60, 'M', u'⦆'), + (0xFF61, 'M', u'.'), + (0xFF62, 'M', u'「'), + (0xFF63, 'M', u'」'), + (0xFF64, 'M', u'、'), + (0xFF65, 'M', u'・'), + (0xFF66, 'M', u'ヲ'), + ] + +def _seg_50(): + return [ + (0xFF67, 'M', u'ァ'), + (0xFF68, 'M', u'ィ'), + (0xFF69, 'M', u'ゥ'), + (0xFF6A, 'M', u'ェ'), + (0xFF6B, 'M', u'ォ'), + (0xFF6C, 'M', u'ャ'), + (0xFF6D, 'M', u'ュ'), + (0xFF6E, 'M', 
u'ョ'), + (0xFF6F, 'M', u'ッ'), + (0xFF70, 'M', u'ー'), + (0xFF71, 'M', u'ア'), + (0xFF72, 'M', u'イ'), + (0xFF73, 'M', u'ウ'), + (0xFF74, 'M', u'エ'), + (0xFF75, 'M', u'オ'), + (0xFF76, 'M', u'カ'), + (0xFF77, 'M', u'キ'), + (0xFF78, 'M', u'ク'), + (0xFF79, 'M', u'ケ'), + (0xFF7A, 'M', u'コ'), + (0xFF7B, 'M', u'サ'), + (0xFF7C, 'M', u'シ'), + (0xFF7D, 'M', u'ス'), + (0xFF7E, 'M', u'セ'), + (0xFF7F, 'M', u'ソ'), + (0xFF80, 'M', u'タ'), + (0xFF81, 'M', u'チ'), + (0xFF82, 'M', u'ツ'), + (0xFF83, 'M', u'テ'), + (0xFF84, 'M', u'ト'), + (0xFF85, 'M', u'ナ'), + (0xFF86, 'M', u'ニ'), + (0xFF87, 'M', u'ヌ'), + (0xFF88, 'M', u'ネ'), + (0xFF89, 'M', u'ノ'), + (0xFF8A, 'M', u'ハ'), + (0xFF8B, 'M', u'ヒ'), + (0xFF8C, 'M', u'フ'), + (0xFF8D, 'M', u'ヘ'), + (0xFF8E, 'M', u'ホ'), + (0xFF8F, 'M', u'マ'), + (0xFF90, 'M', u'ミ'), + (0xFF91, 'M', u'ム'), + (0xFF92, 'M', u'メ'), + (0xFF93, 'M', u'モ'), + (0xFF94, 'M', u'ヤ'), + (0xFF95, 'M', u'ユ'), + (0xFF96, 'M', u'ヨ'), + (0xFF97, 'M', u'ラ'), + (0xFF98, 'M', u'リ'), + (0xFF99, 'M', u'ル'), + (0xFF9A, 'M', u'レ'), + (0xFF9B, 'M', u'ロ'), + (0xFF9C, 'M', u'ワ'), + (0xFF9D, 'M', u'ン'), + (0xFF9E, 'M', u'゙'), + (0xFF9F, 'M', u'゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', u'ᄀ'), + (0xFFA2, 'M', u'ᄁ'), + (0xFFA3, 'M', u'ᆪ'), + (0xFFA4, 'M', u'ᄂ'), + (0xFFA5, 'M', u'ᆬ'), + (0xFFA6, 'M', u'ᆭ'), + (0xFFA7, 'M', u'ᄃ'), + (0xFFA8, 'M', u'ᄄ'), + (0xFFA9, 'M', u'ᄅ'), + (0xFFAA, 'M', u'ᆰ'), + (0xFFAB, 'M', u'ᆱ'), + (0xFFAC, 'M', u'ᆲ'), + (0xFFAD, 'M', u'ᆳ'), + (0xFFAE, 'M', u'ᆴ'), + (0xFFAF, 'M', u'ᆵ'), + (0xFFB0, 'M', u'ᄚ'), + (0xFFB1, 'M', u'ᄆ'), + (0xFFB2, 'M', u'ᄇ'), + (0xFFB3, 'M', u'ᄈ'), + (0xFFB4, 'M', u'ᄡ'), + (0xFFB5, 'M', u'ᄉ'), + (0xFFB6, 'M', u'ᄊ'), + (0xFFB7, 'M', u'ᄋ'), + (0xFFB8, 'M', u'ᄌ'), + (0xFFB9, 'M', u'ᄍ'), + (0xFFBA, 'M', u'ᄎ'), + (0xFFBB, 'M', u'ᄏ'), + (0xFFBC, 'M', u'ᄐ'), + (0xFFBD, 'M', u'ᄑ'), + (0xFFBE, 'M', u'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', u'ᅡ'), + (0xFFC3, 'M', u'ᅢ'), + (0xFFC4, 'M', u'ᅣ'), + (0xFFC5, 'M', u'ᅤ'), + (0xFFC6, 'M', u'ᅥ'), + (0xFFC7, 'M', u'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', u'ᅧ'), + (0xFFCB, 'M', u'ᅨ'), + (0xFFCC, 'M', u'ᅩ'), + (0xFFCD, 'M', u'ᅪ'), + ] + +def _seg_51(): + return [ + (0xFFCE, 'M', u'ᅫ'), + (0xFFCF, 'M', u'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', u'ᅭ'), + (0xFFD3, 'M', u'ᅮ'), + (0xFFD4, 'M', u'ᅯ'), + (0xFFD5, 'M', u'ᅰ'), + (0xFFD6, 'M', u'ᅱ'), + (0xFFD7, 'M', u'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', u'ᅳ'), + (0xFFDB, 'M', u'ᅴ'), + (0xFFDC, 'M', u'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', u'¢'), + (0xFFE1, 'M', u'£'), + (0xFFE2, 'M', u'¬'), + (0xFFE3, '3', u' ̄'), + (0xFFE4, 'M', u'¦'), + (0xFFE5, 'M', u'¥'), + (0xFFE6, 'M', u'₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', u'│'), + (0xFFE9, 'M', u'←'), + (0xFFEA, 'M', u'↑'), + (0xFFEB, 'M', u'→'), + (0xFFEC, 'M', u'↓'), + (0xFFED, 'M', u'■'), + (0xFFEE, 'M', u'○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018B, 'X'), + (0x10190, 'V'), + (0x1019C, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x10300, 'V'), + (0x1031F, 'X'), + (0x10320, 'V'), + (0x10324, 'X'), + (0x10330, 'V'), + (0x1034B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', u'𐐨'), + (0x10401, 
'M', u'𐐩'), + (0x10402, 'M', u'𐐪'), + (0x10403, 'M', u'𐐫'), + (0x10404, 'M', u'𐐬'), + (0x10405, 'M', u'𐐭'), + (0x10406, 'M', u'𐐮'), + (0x10407, 'M', u'𐐯'), + (0x10408, 'M', u'𐐰'), + (0x10409, 'M', u'𐐱'), + (0x1040A, 'M', u'𐐲'), + (0x1040B, 'M', u'𐐳'), + (0x1040C, 'M', u'𐐴'), + (0x1040D, 'M', u'𐐵'), + (0x1040E, 'M', u'𐐶'), + (0x1040F, 'M', u'𐐷'), + (0x10410, 'M', u'𐐸'), + (0x10411, 'M', u'𐐹'), + (0x10412, 'M', u'𐐺'), + (0x10413, 'M', u'𐐻'), + (0x10414, 'M', u'𐐼'), + (0x10415, 'M', u'𐐽'), + (0x10416, 'M', u'𐐾'), + (0x10417, 'M', u'𐐿'), + (0x10418, 'M', u'𐑀'), + (0x10419, 'M', u'𐑁'), + (0x1041A, 'M', u'𐑂'), + (0x1041B, 'M', u'𐑃'), + (0x1041C, 'M', u'𐑄'), + (0x1041D, 'M', u'𐑅'), + ] + +def _seg_52(): + return [ + (0x1041E, 'M', u'𐑆'), + (0x1041F, 'M', u'𐑇'), + (0x10420, 'M', u'𐑈'), + (0x10421, 'M', u'𐑉'), + (0x10422, 'M', u'𐑊'), + (0x10423, 'M', u'𐑋'), + (0x10424, 'M', u'𐑌'), + (0x10425, 'M', u'𐑍'), + (0x10426, 'M', u'𐑎'), + (0x10427, 'M', u'𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x10860, 'X'), + (0x10900, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BE, 'V'), + (0x109C0, 'X'), + (0x10A00, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A34, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A48, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10A80, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B80, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x11080, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11144, 'X'), + (0x11180, 'V'), + (0x111C9, 'X'), + (0x111D0, 'V'), + (0x111DA, 'X'), + (0x11680, 'V'), + (0x116B8, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x12000, 'V'), + (0x1236F, 'X'), + (0x12400, 'V'), + (0x12463, 'X'), + (0x12470, 'V'), + (0x12474, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + ] + +def _seg_53(): + return [ + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16F00, 'V'), + (0x16F45, 'X'), + (0x16F50, 'V'), + (0x16F7F, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x1B000, 'V'), + (0x1B002, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', u'𝅗𝅥'), + (0x1D15F, 'M', u'𝅘𝅥'), + (0x1D160, 'M', u'𝅘𝅥𝅮'), + (0x1D161, 'M', u'𝅘𝅥𝅯'), + (0x1D162, 'M', u'𝅘𝅥𝅰'), + (0x1D163, 'M', u'𝅘𝅥𝅱'), + (0x1D164, 'M', u'𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', u'𝆹𝅥'), + (0x1D1BC, 'M', u'𝆺𝅥'), + (0x1D1BD, 'M', u'𝆹𝅥𝅮'), + (0x1D1BE, 'M', u'𝆺𝅥𝅮'), + (0x1D1BF, 'M', u'𝆹𝅥𝅯'), + (0x1D1C0, 'M', u'𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1DE, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D372, 'X'), + (0x1D400, 'M', u'a'), + (0x1D401, 'M', u'b'), + (0x1D402, 'M', u'c'), + (0x1D403, 'M', u'd'), + (0x1D404, 'M', u'e'), + (0x1D405, 'M', 
u'f'), + (0x1D406, 'M', u'g'), + (0x1D407, 'M', u'h'), + (0x1D408, 'M', u'i'), + (0x1D409, 'M', u'j'), + (0x1D40A, 'M', u'k'), + (0x1D40B, 'M', u'l'), + (0x1D40C, 'M', u'm'), + (0x1D40D, 'M', u'n'), + (0x1D40E, 'M', u'o'), + (0x1D40F, 'M', u'p'), + (0x1D410, 'M', u'q'), + (0x1D411, 'M', u'r'), + (0x1D412, 'M', u's'), + (0x1D413, 'M', u't'), + (0x1D414, 'M', u'u'), + (0x1D415, 'M', u'v'), + (0x1D416, 'M', u'w'), + (0x1D417, 'M', u'x'), + (0x1D418, 'M', u'y'), + (0x1D419, 'M', u'z'), + (0x1D41A, 'M', u'a'), + (0x1D41B, 'M', u'b'), + (0x1D41C, 'M', u'c'), + (0x1D41D, 'M', u'd'), + (0x1D41E, 'M', u'e'), + (0x1D41F, 'M', u'f'), + (0x1D420, 'M', u'g'), + (0x1D421, 'M', u'h'), + (0x1D422, 'M', u'i'), + (0x1D423, 'M', u'j'), + (0x1D424, 'M', u'k'), + (0x1D425, 'M', u'l'), + (0x1D426, 'M', u'm'), + (0x1D427, 'M', u'n'), + (0x1D428, 'M', u'o'), + (0x1D429, 'M', u'p'), + (0x1D42A, 'M', u'q'), + (0x1D42B, 'M', u'r'), + (0x1D42C, 'M', u's'), + (0x1D42D, 'M', u't'), + (0x1D42E, 'M', u'u'), + (0x1D42F, 'M', u'v'), + (0x1D430, 'M', u'w'), + (0x1D431, 'M', u'x'), + (0x1D432, 'M', u'y'), + (0x1D433, 'M', u'z'), + (0x1D434, 'M', u'a'), + (0x1D435, 'M', u'b'), + (0x1D436, 'M', u'c'), + (0x1D437, 'M', u'd'), + (0x1D438, 'M', u'e'), + (0x1D439, 'M', u'f'), + (0x1D43A, 'M', u'g'), + (0x1D43B, 'M', u'h'), + (0x1D43C, 'M', u'i'), + ] + +def _seg_54(): + return [ + (0x1D43D, 'M', u'j'), + (0x1D43E, 'M', u'k'), + (0x1D43F, 'M', u'l'), + (0x1D440, 'M', u'm'), + (0x1D441, 'M', u'n'), + (0x1D442, 'M', u'o'), + (0x1D443, 'M', u'p'), + (0x1D444, 'M', u'q'), + (0x1D445, 'M', u'r'), + (0x1D446, 'M', u's'), + (0x1D447, 'M', u't'), + (0x1D448, 'M', u'u'), + (0x1D449, 'M', u'v'), + (0x1D44A, 'M', u'w'), + (0x1D44B, 'M', u'x'), + (0x1D44C, 'M', u'y'), + (0x1D44D, 'M', u'z'), + (0x1D44E, 'M', u'a'), + (0x1D44F, 'M', u'b'), + (0x1D450, 'M', u'c'), + (0x1D451, 'M', u'd'), + (0x1D452, 'M', u'e'), + (0x1D453, 'M', u'f'), + (0x1D454, 'M', u'g'), + (0x1D455, 'X'), + (0x1D456, 'M', u'i'), + (0x1D457, 'M', u'j'), + (0x1D458, 'M', u'k'), + (0x1D459, 'M', u'l'), + (0x1D45A, 'M', u'm'), + (0x1D45B, 'M', u'n'), + (0x1D45C, 'M', u'o'), + (0x1D45D, 'M', u'p'), + (0x1D45E, 'M', u'q'), + (0x1D45F, 'M', u'r'), + (0x1D460, 'M', u's'), + (0x1D461, 'M', u't'), + (0x1D462, 'M', u'u'), + (0x1D463, 'M', u'v'), + (0x1D464, 'M', u'w'), + (0x1D465, 'M', u'x'), + (0x1D466, 'M', u'y'), + (0x1D467, 'M', u'z'), + (0x1D468, 'M', u'a'), + (0x1D469, 'M', u'b'), + (0x1D46A, 'M', u'c'), + (0x1D46B, 'M', u'd'), + (0x1D46C, 'M', u'e'), + (0x1D46D, 'M', u'f'), + (0x1D46E, 'M', u'g'), + (0x1D46F, 'M', u'h'), + (0x1D470, 'M', u'i'), + (0x1D471, 'M', u'j'), + (0x1D472, 'M', u'k'), + (0x1D473, 'M', u'l'), + (0x1D474, 'M', u'm'), + (0x1D475, 'M', u'n'), + (0x1D476, 'M', u'o'), + (0x1D477, 'M', u'p'), + (0x1D478, 'M', u'q'), + (0x1D479, 'M', u'r'), + (0x1D47A, 'M', u's'), + (0x1D47B, 'M', u't'), + (0x1D47C, 'M', u'u'), + (0x1D47D, 'M', u'v'), + (0x1D47E, 'M', u'w'), + (0x1D47F, 'M', u'x'), + (0x1D480, 'M', u'y'), + (0x1D481, 'M', u'z'), + (0x1D482, 'M', u'a'), + (0x1D483, 'M', u'b'), + (0x1D484, 'M', u'c'), + (0x1D485, 'M', u'd'), + (0x1D486, 'M', u'e'), + (0x1D487, 'M', u'f'), + (0x1D488, 'M', u'g'), + (0x1D489, 'M', u'h'), + (0x1D48A, 'M', u'i'), + (0x1D48B, 'M', u'j'), + (0x1D48C, 'M', u'k'), + (0x1D48D, 'M', u'l'), + (0x1D48E, 'M', u'm'), + (0x1D48F, 'M', u'n'), + (0x1D490, 'M', u'o'), + (0x1D491, 'M', u'p'), + (0x1D492, 'M', u'q'), + (0x1D493, 'M', u'r'), + (0x1D494, 'M', u's'), + (0x1D495, 'M', u't'), + (0x1D496, 'M', u'u'), + (0x1D497, 'M', u'v'), + (0x1D498, 'M', 
u'w'), + (0x1D499, 'M', u'x'), + (0x1D49A, 'M', u'y'), + (0x1D49B, 'M', u'z'), + (0x1D49C, 'M', u'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', u'c'), + (0x1D49F, 'M', u'd'), + (0x1D4A0, 'X'), + ] + +def _seg_55(): + return [ + (0x1D4A2, 'M', u'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', u'j'), + (0x1D4A6, 'M', u'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', u'n'), + (0x1D4AA, 'M', u'o'), + (0x1D4AB, 'M', u'p'), + (0x1D4AC, 'M', u'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', u's'), + (0x1D4AF, 'M', u't'), + (0x1D4B0, 'M', u'u'), + (0x1D4B1, 'M', u'v'), + (0x1D4B2, 'M', u'w'), + (0x1D4B3, 'M', u'x'), + (0x1D4B4, 'M', u'y'), + (0x1D4B5, 'M', u'z'), + (0x1D4B6, 'M', u'a'), + (0x1D4B7, 'M', u'b'), + (0x1D4B8, 'M', u'c'), + (0x1D4B9, 'M', u'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', u'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', u'h'), + (0x1D4BE, 'M', u'i'), + (0x1D4BF, 'M', u'j'), + (0x1D4C0, 'M', u'k'), + (0x1D4C1, 'M', u'l'), + (0x1D4C2, 'M', u'm'), + (0x1D4C3, 'M', u'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', u'p'), + (0x1D4C6, 'M', u'q'), + (0x1D4C7, 'M', u'r'), + (0x1D4C8, 'M', u's'), + (0x1D4C9, 'M', u't'), + (0x1D4CA, 'M', u'u'), + (0x1D4CB, 'M', u'v'), + (0x1D4CC, 'M', u'w'), + (0x1D4CD, 'M', u'x'), + (0x1D4CE, 'M', u'y'), + (0x1D4CF, 'M', u'z'), + (0x1D4D0, 'M', u'a'), + (0x1D4D1, 'M', u'b'), + (0x1D4D2, 'M', u'c'), + (0x1D4D3, 'M', u'd'), + (0x1D4D4, 'M', u'e'), + (0x1D4D5, 'M', u'f'), + (0x1D4D6, 'M', u'g'), + (0x1D4D7, 'M', u'h'), + (0x1D4D8, 'M', u'i'), + (0x1D4D9, 'M', u'j'), + (0x1D4DA, 'M', u'k'), + (0x1D4DB, 'M', u'l'), + (0x1D4DC, 'M', u'm'), + (0x1D4DD, 'M', u'n'), + (0x1D4DE, 'M', u'o'), + (0x1D4DF, 'M', u'p'), + (0x1D4E0, 'M', u'q'), + (0x1D4E1, 'M', u'r'), + (0x1D4E2, 'M', u's'), + (0x1D4E3, 'M', u't'), + (0x1D4E4, 'M', u'u'), + (0x1D4E5, 'M', u'v'), + (0x1D4E6, 'M', u'w'), + (0x1D4E7, 'M', u'x'), + (0x1D4E8, 'M', u'y'), + (0x1D4E9, 'M', u'z'), + (0x1D4EA, 'M', u'a'), + (0x1D4EB, 'M', u'b'), + (0x1D4EC, 'M', u'c'), + (0x1D4ED, 'M', u'd'), + (0x1D4EE, 'M', u'e'), + (0x1D4EF, 'M', u'f'), + (0x1D4F0, 'M', u'g'), + (0x1D4F1, 'M', u'h'), + (0x1D4F2, 'M', u'i'), + (0x1D4F3, 'M', u'j'), + (0x1D4F4, 'M', u'k'), + (0x1D4F5, 'M', u'l'), + (0x1D4F6, 'M', u'm'), + (0x1D4F7, 'M', u'n'), + (0x1D4F8, 'M', u'o'), + (0x1D4F9, 'M', u'p'), + (0x1D4FA, 'M', u'q'), + (0x1D4FB, 'M', u'r'), + (0x1D4FC, 'M', u's'), + (0x1D4FD, 'M', u't'), + (0x1D4FE, 'M', u'u'), + (0x1D4FF, 'M', u'v'), + (0x1D500, 'M', u'w'), + (0x1D501, 'M', u'x'), + (0x1D502, 'M', u'y'), + (0x1D503, 'M', u'z'), + (0x1D504, 'M', u'a'), + (0x1D505, 'M', u'b'), + (0x1D506, 'X'), + (0x1D507, 'M', u'd'), + ] + +def _seg_56(): + return [ + (0x1D508, 'M', u'e'), + (0x1D509, 'M', u'f'), + (0x1D50A, 'M', u'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', u'j'), + (0x1D50E, 'M', u'k'), + (0x1D50F, 'M', u'l'), + (0x1D510, 'M', u'm'), + (0x1D511, 'M', u'n'), + (0x1D512, 'M', u'o'), + (0x1D513, 'M', u'p'), + (0x1D514, 'M', u'q'), + (0x1D515, 'X'), + (0x1D516, 'M', u's'), + (0x1D517, 'M', u't'), + (0x1D518, 'M', u'u'), + (0x1D519, 'M', u'v'), + (0x1D51A, 'M', u'w'), + (0x1D51B, 'M', u'x'), + (0x1D51C, 'M', u'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', u'a'), + (0x1D51F, 'M', u'b'), + (0x1D520, 'M', u'c'), + (0x1D521, 'M', u'd'), + (0x1D522, 'M', u'e'), + (0x1D523, 'M', u'f'), + (0x1D524, 'M', u'g'), + (0x1D525, 'M', u'h'), + (0x1D526, 'M', u'i'), + (0x1D527, 'M', u'j'), + (0x1D528, 'M', u'k'), + (0x1D529, 'M', u'l'), + (0x1D52A, 'M', u'm'), + (0x1D52B, 'M', u'n'), + (0x1D52C, 'M', u'o'), + (0x1D52D, 'M', u'p'), + (0x1D52E, 'M', u'q'), + (0x1D52F, 'M', u'r'), + (0x1D530, 'M', u's'), + 
(0x1D531, 'M', u't'), + (0x1D532, 'M', u'u'), + (0x1D533, 'M', u'v'), + (0x1D534, 'M', u'w'), + (0x1D535, 'M', u'x'), + (0x1D536, 'M', u'y'), + (0x1D537, 'M', u'z'), + (0x1D538, 'M', u'a'), + (0x1D539, 'M', u'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', u'd'), + (0x1D53C, 'M', u'e'), + (0x1D53D, 'M', u'f'), + (0x1D53E, 'M', u'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', u'i'), + (0x1D541, 'M', u'j'), + (0x1D542, 'M', u'k'), + (0x1D543, 'M', u'l'), + (0x1D544, 'M', u'm'), + (0x1D545, 'X'), + (0x1D546, 'M', u'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', u's'), + (0x1D54B, 'M', u't'), + (0x1D54C, 'M', u'u'), + (0x1D54D, 'M', u'v'), + (0x1D54E, 'M', u'w'), + (0x1D54F, 'M', u'x'), + (0x1D550, 'M', u'y'), + (0x1D551, 'X'), + (0x1D552, 'M', u'a'), + (0x1D553, 'M', u'b'), + (0x1D554, 'M', u'c'), + (0x1D555, 'M', u'd'), + (0x1D556, 'M', u'e'), + (0x1D557, 'M', u'f'), + (0x1D558, 'M', u'g'), + (0x1D559, 'M', u'h'), + (0x1D55A, 'M', u'i'), + (0x1D55B, 'M', u'j'), + (0x1D55C, 'M', u'k'), + (0x1D55D, 'M', u'l'), + (0x1D55E, 'M', u'm'), + (0x1D55F, 'M', u'n'), + (0x1D560, 'M', u'o'), + (0x1D561, 'M', u'p'), + (0x1D562, 'M', u'q'), + (0x1D563, 'M', u'r'), + (0x1D564, 'M', u's'), + (0x1D565, 'M', u't'), + (0x1D566, 'M', u'u'), + (0x1D567, 'M', u'v'), + (0x1D568, 'M', u'w'), + (0x1D569, 'M', u'x'), + (0x1D56A, 'M', u'y'), + (0x1D56B, 'M', u'z'), + (0x1D56C, 'M', u'a'), + (0x1D56D, 'M', u'b'), + (0x1D56E, 'M', u'c'), + ] + +def _seg_57(): + return [ + (0x1D56F, 'M', u'd'), + (0x1D570, 'M', u'e'), + (0x1D571, 'M', u'f'), + (0x1D572, 'M', u'g'), + (0x1D573, 'M', u'h'), + (0x1D574, 'M', u'i'), + (0x1D575, 'M', u'j'), + (0x1D576, 'M', u'k'), + (0x1D577, 'M', u'l'), + (0x1D578, 'M', u'm'), + (0x1D579, 'M', u'n'), + (0x1D57A, 'M', u'o'), + (0x1D57B, 'M', u'p'), + (0x1D57C, 'M', u'q'), + (0x1D57D, 'M', u'r'), + (0x1D57E, 'M', u's'), + (0x1D57F, 'M', u't'), + (0x1D580, 'M', u'u'), + (0x1D581, 'M', u'v'), + (0x1D582, 'M', u'w'), + (0x1D583, 'M', u'x'), + (0x1D584, 'M', u'y'), + (0x1D585, 'M', u'z'), + (0x1D586, 'M', u'a'), + (0x1D587, 'M', u'b'), + (0x1D588, 'M', u'c'), + (0x1D589, 'M', u'd'), + (0x1D58A, 'M', u'e'), + (0x1D58B, 'M', u'f'), + (0x1D58C, 'M', u'g'), + (0x1D58D, 'M', u'h'), + (0x1D58E, 'M', u'i'), + (0x1D58F, 'M', u'j'), + (0x1D590, 'M', u'k'), + (0x1D591, 'M', u'l'), + (0x1D592, 'M', u'm'), + (0x1D593, 'M', u'n'), + (0x1D594, 'M', u'o'), + (0x1D595, 'M', u'p'), + (0x1D596, 'M', u'q'), + (0x1D597, 'M', u'r'), + (0x1D598, 'M', u's'), + (0x1D599, 'M', u't'), + (0x1D59A, 'M', u'u'), + (0x1D59B, 'M', u'v'), + (0x1D59C, 'M', u'w'), + (0x1D59D, 'M', u'x'), + (0x1D59E, 'M', u'y'), + (0x1D59F, 'M', u'z'), + (0x1D5A0, 'M', u'a'), + (0x1D5A1, 'M', u'b'), + (0x1D5A2, 'M', u'c'), + (0x1D5A3, 'M', u'd'), + (0x1D5A4, 'M', u'e'), + (0x1D5A5, 'M', u'f'), + (0x1D5A6, 'M', u'g'), + (0x1D5A7, 'M', u'h'), + (0x1D5A8, 'M', u'i'), + (0x1D5A9, 'M', u'j'), + (0x1D5AA, 'M', u'k'), + (0x1D5AB, 'M', u'l'), + (0x1D5AC, 'M', u'm'), + (0x1D5AD, 'M', u'n'), + (0x1D5AE, 'M', u'o'), + (0x1D5AF, 'M', u'p'), + (0x1D5B0, 'M', u'q'), + (0x1D5B1, 'M', u'r'), + (0x1D5B2, 'M', u's'), + (0x1D5B3, 'M', u't'), + (0x1D5B4, 'M', u'u'), + (0x1D5B5, 'M', u'v'), + (0x1D5B6, 'M', u'w'), + (0x1D5B7, 'M', u'x'), + (0x1D5B8, 'M', u'y'), + (0x1D5B9, 'M', u'z'), + (0x1D5BA, 'M', u'a'), + (0x1D5BB, 'M', u'b'), + (0x1D5BC, 'M', u'c'), + (0x1D5BD, 'M', u'd'), + (0x1D5BE, 'M', u'e'), + (0x1D5BF, 'M', u'f'), + (0x1D5C0, 'M', u'g'), + (0x1D5C1, 'M', u'h'), + (0x1D5C2, 'M', u'i'), + (0x1D5C3, 'M', u'j'), + (0x1D5C4, 'M', u'k'), + (0x1D5C5, 'M', u'l'), + (0x1D5C6, 'M', u'm'), + 
(0x1D5C7, 'M', u'n'), + (0x1D5C8, 'M', u'o'), + (0x1D5C9, 'M', u'p'), + (0x1D5CA, 'M', u'q'), + (0x1D5CB, 'M', u'r'), + (0x1D5CC, 'M', u's'), + (0x1D5CD, 'M', u't'), + (0x1D5CE, 'M', u'u'), + (0x1D5CF, 'M', u'v'), + (0x1D5D0, 'M', u'w'), + (0x1D5D1, 'M', u'x'), + (0x1D5D2, 'M', u'y'), + ] + +def _seg_58(): + return [ + (0x1D5D3, 'M', u'z'), + (0x1D5D4, 'M', u'a'), + (0x1D5D5, 'M', u'b'), + (0x1D5D6, 'M', u'c'), + (0x1D5D7, 'M', u'd'), + (0x1D5D8, 'M', u'e'), + (0x1D5D9, 'M', u'f'), + (0x1D5DA, 'M', u'g'), + (0x1D5DB, 'M', u'h'), + (0x1D5DC, 'M', u'i'), + (0x1D5DD, 'M', u'j'), + (0x1D5DE, 'M', u'k'), + (0x1D5DF, 'M', u'l'), + (0x1D5E0, 'M', u'm'), + (0x1D5E1, 'M', u'n'), + (0x1D5E2, 'M', u'o'), + (0x1D5E3, 'M', u'p'), + (0x1D5E4, 'M', u'q'), + (0x1D5E5, 'M', u'r'), + (0x1D5E6, 'M', u's'), + (0x1D5E7, 'M', u't'), + (0x1D5E8, 'M', u'u'), + (0x1D5E9, 'M', u'v'), + (0x1D5EA, 'M', u'w'), + (0x1D5EB, 'M', u'x'), + (0x1D5EC, 'M', u'y'), + (0x1D5ED, 'M', u'z'), + (0x1D5EE, 'M', u'a'), + (0x1D5EF, 'M', u'b'), + (0x1D5F0, 'M', u'c'), + (0x1D5F1, 'M', u'd'), + (0x1D5F2, 'M', u'e'), + (0x1D5F3, 'M', u'f'), + (0x1D5F4, 'M', u'g'), + (0x1D5F5, 'M', u'h'), + (0x1D5F6, 'M', u'i'), + (0x1D5F7, 'M', u'j'), + (0x1D5F8, 'M', u'k'), + (0x1D5F9, 'M', u'l'), + (0x1D5FA, 'M', u'm'), + (0x1D5FB, 'M', u'n'), + (0x1D5FC, 'M', u'o'), + (0x1D5FD, 'M', u'p'), + (0x1D5FE, 'M', u'q'), + (0x1D5FF, 'M', u'r'), + (0x1D600, 'M', u's'), + (0x1D601, 'M', u't'), + (0x1D602, 'M', u'u'), + (0x1D603, 'M', u'v'), + (0x1D604, 'M', u'w'), + (0x1D605, 'M', u'x'), + (0x1D606, 'M', u'y'), + (0x1D607, 'M', u'z'), + (0x1D608, 'M', u'a'), + (0x1D609, 'M', u'b'), + (0x1D60A, 'M', u'c'), + (0x1D60B, 'M', u'd'), + (0x1D60C, 'M', u'e'), + (0x1D60D, 'M', u'f'), + (0x1D60E, 'M', u'g'), + (0x1D60F, 'M', u'h'), + (0x1D610, 'M', u'i'), + (0x1D611, 'M', u'j'), + (0x1D612, 'M', u'k'), + (0x1D613, 'M', u'l'), + (0x1D614, 'M', u'm'), + (0x1D615, 'M', u'n'), + (0x1D616, 'M', u'o'), + (0x1D617, 'M', u'p'), + (0x1D618, 'M', u'q'), + (0x1D619, 'M', u'r'), + (0x1D61A, 'M', u's'), + (0x1D61B, 'M', u't'), + (0x1D61C, 'M', u'u'), + (0x1D61D, 'M', u'v'), + (0x1D61E, 'M', u'w'), + (0x1D61F, 'M', u'x'), + (0x1D620, 'M', u'y'), + (0x1D621, 'M', u'z'), + (0x1D622, 'M', u'a'), + (0x1D623, 'M', u'b'), + (0x1D624, 'M', u'c'), + (0x1D625, 'M', u'd'), + (0x1D626, 'M', u'e'), + (0x1D627, 'M', u'f'), + (0x1D628, 'M', u'g'), + (0x1D629, 'M', u'h'), + (0x1D62A, 'M', u'i'), + (0x1D62B, 'M', u'j'), + (0x1D62C, 'M', u'k'), + (0x1D62D, 'M', u'l'), + (0x1D62E, 'M', u'm'), + (0x1D62F, 'M', u'n'), + (0x1D630, 'M', u'o'), + (0x1D631, 'M', u'p'), + (0x1D632, 'M', u'q'), + (0x1D633, 'M', u'r'), + (0x1D634, 'M', u's'), + (0x1D635, 'M', u't'), + (0x1D636, 'M', u'u'), + ] + +def _seg_59(): + return [ + (0x1D637, 'M', u'v'), + (0x1D638, 'M', u'w'), + (0x1D639, 'M', u'x'), + (0x1D63A, 'M', u'y'), + (0x1D63B, 'M', u'z'), + (0x1D63C, 'M', u'a'), + (0x1D63D, 'M', u'b'), + (0x1D63E, 'M', u'c'), + (0x1D63F, 'M', u'd'), + (0x1D640, 'M', u'e'), + (0x1D641, 'M', u'f'), + (0x1D642, 'M', u'g'), + (0x1D643, 'M', u'h'), + (0x1D644, 'M', u'i'), + (0x1D645, 'M', u'j'), + (0x1D646, 'M', u'k'), + (0x1D647, 'M', u'l'), + (0x1D648, 'M', u'm'), + (0x1D649, 'M', u'n'), + (0x1D64A, 'M', u'o'), + (0x1D64B, 'M', u'p'), + (0x1D64C, 'M', u'q'), + (0x1D64D, 'M', u'r'), + (0x1D64E, 'M', u's'), + (0x1D64F, 'M', u't'), + (0x1D650, 'M', u'u'), + (0x1D651, 'M', u'v'), + (0x1D652, 'M', u'w'), + (0x1D653, 'M', u'x'), + (0x1D654, 'M', u'y'), + (0x1D655, 'M', u'z'), + (0x1D656, 'M', u'a'), + (0x1D657, 'M', u'b'), + 
(0x1D658, 'M', u'c'), + (0x1D659, 'M', u'd'), + (0x1D65A, 'M', u'e'), + (0x1D65B, 'M', u'f'), + (0x1D65C, 'M', u'g'), + (0x1D65D, 'M', u'h'), + (0x1D65E, 'M', u'i'), + (0x1D65F, 'M', u'j'), + (0x1D660, 'M', u'k'), + (0x1D661, 'M', u'l'), + (0x1D662, 'M', u'm'), + (0x1D663, 'M', u'n'), + (0x1D664, 'M', u'o'), + (0x1D665, 'M', u'p'), + (0x1D666, 'M', u'q'), + (0x1D667, 'M', u'r'), + (0x1D668, 'M', u's'), + (0x1D669, 'M', u't'), + (0x1D66A, 'M', u'u'), + (0x1D66B, 'M', u'v'), + (0x1D66C, 'M', u'w'), + (0x1D66D, 'M', u'x'), + (0x1D66E, 'M', u'y'), + (0x1D66F, 'M', u'z'), + (0x1D670, 'M', u'a'), + (0x1D671, 'M', u'b'), + (0x1D672, 'M', u'c'), + (0x1D673, 'M', u'd'), + (0x1D674, 'M', u'e'), + (0x1D675, 'M', u'f'), + (0x1D676, 'M', u'g'), + (0x1D677, 'M', u'h'), + (0x1D678, 'M', u'i'), + (0x1D679, 'M', u'j'), + (0x1D67A, 'M', u'k'), + (0x1D67B, 'M', u'l'), + (0x1D67C, 'M', u'm'), + (0x1D67D, 'M', u'n'), + (0x1D67E, 'M', u'o'), + (0x1D67F, 'M', u'p'), + (0x1D680, 'M', u'q'), + (0x1D681, 'M', u'r'), + (0x1D682, 'M', u's'), + (0x1D683, 'M', u't'), + (0x1D684, 'M', u'u'), + (0x1D685, 'M', u'v'), + (0x1D686, 'M', u'w'), + (0x1D687, 'M', u'x'), + (0x1D688, 'M', u'y'), + (0x1D689, 'M', u'z'), + (0x1D68A, 'M', u'a'), + (0x1D68B, 'M', u'b'), + (0x1D68C, 'M', u'c'), + (0x1D68D, 'M', u'd'), + (0x1D68E, 'M', u'e'), + (0x1D68F, 'M', u'f'), + (0x1D690, 'M', u'g'), + (0x1D691, 'M', u'h'), + (0x1D692, 'M', u'i'), + (0x1D693, 'M', u'j'), + (0x1D694, 'M', u'k'), + (0x1D695, 'M', u'l'), + (0x1D696, 'M', u'm'), + (0x1D697, 'M', u'n'), + (0x1D698, 'M', u'o'), + (0x1D699, 'M', u'p'), + (0x1D69A, 'M', u'q'), + ] + +def _seg_60(): + return [ + (0x1D69B, 'M', u'r'), + (0x1D69C, 'M', u's'), + (0x1D69D, 'M', u't'), + (0x1D69E, 'M', u'u'), + (0x1D69F, 'M', u'v'), + (0x1D6A0, 'M', u'w'), + (0x1D6A1, 'M', u'x'), + (0x1D6A2, 'M', u'y'), + (0x1D6A3, 'M', u'z'), + (0x1D6A4, 'M', u'ı'), + (0x1D6A5, 'M', u'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', u'α'), + (0x1D6A9, 'M', u'β'), + (0x1D6AA, 'M', u'γ'), + (0x1D6AB, 'M', u'δ'), + (0x1D6AC, 'M', u'ε'), + (0x1D6AD, 'M', u'ζ'), + (0x1D6AE, 'M', u'η'), + (0x1D6AF, 'M', u'θ'), + (0x1D6B0, 'M', u'ι'), + (0x1D6B1, 'M', u'κ'), + (0x1D6B2, 'M', u'λ'), + (0x1D6B3, 'M', u'μ'), + (0x1D6B4, 'M', u'ν'), + (0x1D6B5, 'M', u'ξ'), + (0x1D6B6, 'M', u'ο'), + (0x1D6B7, 'M', u'π'), + (0x1D6B8, 'M', u'ρ'), + (0x1D6B9, 'M', u'θ'), + (0x1D6BA, 'M', u'σ'), + (0x1D6BB, 'M', u'τ'), + (0x1D6BC, 'M', u'υ'), + (0x1D6BD, 'M', u'φ'), + (0x1D6BE, 'M', u'χ'), + (0x1D6BF, 'M', u'ψ'), + (0x1D6C0, 'M', u'ω'), + (0x1D6C1, 'M', u'∇'), + (0x1D6C2, 'M', u'α'), + (0x1D6C3, 'M', u'β'), + (0x1D6C4, 'M', u'γ'), + (0x1D6C5, 'M', u'δ'), + (0x1D6C6, 'M', u'ε'), + (0x1D6C7, 'M', u'ζ'), + (0x1D6C8, 'M', u'η'), + (0x1D6C9, 'M', u'θ'), + (0x1D6CA, 'M', u'ι'), + (0x1D6CB, 'M', u'κ'), + (0x1D6CC, 'M', u'λ'), + (0x1D6CD, 'M', u'μ'), + (0x1D6CE, 'M', u'ν'), + (0x1D6CF, 'M', u'ξ'), + (0x1D6D0, 'M', u'ο'), + (0x1D6D1, 'M', u'π'), + (0x1D6D2, 'M', u'ρ'), + (0x1D6D3, 'M', u'σ'), + (0x1D6D5, 'M', u'τ'), + (0x1D6D6, 'M', u'υ'), + (0x1D6D7, 'M', u'φ'), + (0x1D6D8, 'M', u'χ'), + (0x1D6D9, 'M', u'ψ'), + (0x1D6DA, 'M', u'ω'), + (0x1D6DB, 'M', u'∂'), + (0x1D6DC, 'M', u'ε'), + (0x1D6DD, 'M', u'θ'), + (0x1D6DE, 'M', u'κ'), + (0x1D6DF, 'M', u'φ'), + (0x1D6E0, 'M', u'ρ'), + (0x1D6E1, 'M', u'π'), + (0x1D6E2, 'M', u'α'), + (0x1D6E3, 'M', u'β'), + (0x1D6E4, 'M', u'γ'), + (0x1D6E5, 'M', u'δ'), + (0x1D6E6, 'M', u'ε'), + (0x1D6E7, 'M', u'ζ'), + (0x1D6E8, 'M', u'η'), + (0x1D6E9, 'M', u'θ'), + (0x1D6EA, 'M', u'ι'), + (0x1D6EB, 'M', u'κ'), + (0x1D6EC, 'M', u'λ'), + 
(0x1D6ED, 'M', u'μ'), + (0x1D6EE, 'M', u'ν'), + (0x1D6EF, 'M', u'ξ'), + (0x1D6F0, 'M', u'ο'), + (0x1D6F1, 'M', u'π'), + (0x1D6F2, 'M', u'ρ'), + (0x1D6F3, 'M', u'θ'), + (0x1D6F4, 'M', u'σ'), + (0x1D6F5, 'M', u'τ'), + (0x1D6F6, 'M', u'υ'), + (0x1D6F7, 'M', u'φ'), + (0x1D6F8, 'M', u'χ'), + (0x1D6F9, 'M', u'ψ'), + (0x1D6FA, 'M', u'ω'), + (0x1D6FB, 'M', u'∇'), + (0x1D6FC, 'M', u'α'), + (0x1D6FD, 'M', u'β'), + (0x1D6FE, 'M', u'γ'), + (0x1D6FF, 'M', u'δ'), + (0x1D700, 'M', u'ε'), + ] + +def _seg_61(): + return [ + (0x1D701, 'M', u'ζ'), + (0x1D702, 'M', u'η'), + (0x1D703, 'M', u'θ'), + (0x1D704, 'M', u'ι'), + (0x1D705, 'M', u'κ'), + (0x1D706, 'M', u'λ'), + (0x1D707, 'M', u'μ'), + (0x1D708, 'M', u'ν'), + (0x1D709, 'M', u'ξ'), + (0x1D70A, 'M', u'ο'), + (0x1D70B, 'M', u'π'), + (0x1D70C, 'M', u'ρ'), + (0x1D70D, 'M', u'σ'), + (0x1D70F, 'M', u'τ'), + (0x1D710, 'M', u'υ'), + (0x1D711, 'M', u'φ'), + (0x1D712, 'M', u'χ'), + (0x1D713, 'M', u'ψ'), + (0x1D714, 'M', u'ω'), + (0x1D715, 'M', u'∂'), + (0x1D716, 'M', u'ε'), + (0x1D717, 'M', u'θ'), + (0x1D718, 'M', u'κ'), + (0x1D719, 'M', u'φ'), + (0x1D71A, 'M', u'ρ'), + (0x1D71B, 'M', u'π'), + (0x1D71C, 'M', u'α'), + (0x1D71D, 'M', u'β'), + (0x1D71E, 'M', u'γ'), + (0x1D71F, 'M', u'δ'), + (0x1D720, 'M', u'ε'), + (0x1D721, 'M', u'ζ'), + (0x1D722, 'M', u'η'), + (0x1D723, 'M', u'θ'), + (0x1D724, 'M', u'ι'), + (0x1D725, 'M', u'κ'), + (0x1D726, 'M', u'λ'), + (0x1D727, 'M', u'μ'), + (0x1D728, 'M', u'ν'), + (0x1D729, 'M', u'ξ'), + (0x1D72A, 'M', u'ο'), + (0x1D72B, 'M', u'π'), + (0x1D72C, 'M', u'ρ'), + (0x1D72D, 'M', u'θ'), + (0x1D72E, 'M', u'σ'), + (0x1D72F, 'M', u'τ'), + (0x1D730, 'M', u'υ'), + (0x1D731, 'M', u'φ'), + (0x1D732, 'M', u'χ'), + (0x1D733, 'M', u'ψ'), + (0x1D734, 'M', u'ω'), + (0x1D735, 'M', u'∇'), + (0x1D736, 'M', u'α'), + (0x1D737, 'M', u'β'), + (0x1D738, 'M', u'γ'), + (0x1D739, 'M', u'δ'), + (0x1D73A, 'M', u'ε'), + (0x1D73B, 'M', u'ζ'), + (0x1D73C, 'M', u'η'), + (0x1D73D, 'M', u'θ'), + (0x1D73E, 'M', u'ι'), + (0x1D73F, 'M', u'κ'), + (0x1D740, 'M', u'λ'), + (0x1D741, 'M', u'μ'), + (0x1D742, 'M', u'ν'), + (0x1D743, 'M', u'ξ'), + (0x1D744, 'M', u'ο'), + (0x1D745, 'M', u'π'), + (0x1D746, 'M', u'ρ'), + (0x1D747, 'M', u'σ'), + (0x1D749, 'M', u'τ'), + (0x1D74A, 'M', u'υ'), + (0x1D74B, 'M', u'φ'), + (0x1D74C, 'M', u'χ'), + (0x1D74D, 'M', u'ψ'), + (0x1D74E, 'M', u'ω'), + (0x1D74F, 'M', u'∂'), + (0x1D750, 'M', u'ε'), + (0x1D751, 'M', u'θ'), + (0x1D752, 'M', u'κ'), + (0x1D753, 'M', u'φ'), + (0x1D754, 'M', u'ρ'), + (0x1D755, 'M', u'π'), + (0x1D756, 'M', u'α'), + (0x1D757, 'M', u'β'), + (0x1D758, 'M', u'γ'), + (0x1D759, 'M', u'δ'), + (0x1D75A, 'M', u'ε'), + (0x1D75B, 'M', u'ζ'), + (0x1D75C, 'M', u'η'), + (0x1D75D, 'M', u'θ'), + (0x1D75E, 'M', u'ι'), + (0x1D75F, 'M', u'κ'), + (0x1D760, 'M', u'λ'), + (0x1D761, 'M', u'μ'), + (0x1D762, 'M', u'ν'), + (0x1D763, 'M', u'ξ'), + (0x1D764, 'M', u'ο'), + (0x1D765, 'M', u'π'), + (0x1D766, 'M', u'ρ'), + ] + +def _seg_62(): + return [ + (0x1D767, 'M', u'θ'), + (0x1D768, 'M', u'σ'), + (0x1D769, 'M', u'τ'), + (0x1D76A, 'M', u'υ'), + (0x1D76B, 'M', u'φ'), + (0x1D76C, 'M', u'χ'), + (0x1D76D, 'M', u'ψ'), + (0x1D76E, 'M', u'ω'), + (0x1D76F, 'M', u'∇'), + (0x1D770, 'M', u'α'), + (0x1D771, 'M', u'β'), + (0x1D772, 'M', u'γ'), + (0x1D773, 'M', u'δ'), + (0x1D774, 'M', u'ε'), + (0x1D775, 'M', u'ζ'), + (0x1D776, 'M', u'η'), + (0x1D777, 'M', u'θ'), + (0x1D778, 'M', u'ι'), + (0x1D779, 'M', u'κ'), + (0x1D77A, 'M', u'λ'), + (0x1D77B, 'M', u'μ'), + (0x1D77C, 'M', u'ν'), + (0x1D77D, 'M', u'ξ'), + (0x1D77E, 'M', u'ο'), + (0x1D77F, 'M', u'π'), + 
(0x1D780, 'M', u'ρ'), + (0x1D781, 'M', u'σ'), + (0x1D783, 'M', u'τ'), + (0x1D784, 'M', u'υ'), + (0x1D785, 'M', u'φ'), + (0x1D786, 'M', u'χ'), + (0x1D787, 'M', u'ψ'), + (0x1D788, 'M', u'ω'), + (0x1D789, 'M', u'∂'), + (0x1D78A, 'M', u'ε'), + (0x1D78B, 'M', u'θ'), + (0x1D78C, 'M', u'κ'), + (0x1D78D, 'M', u'φ'), + (0x1D78E, 'M', u'ρ'), + (0x1D78F, 'M', u'π'), + (0x1D790, 'M', u'α'), + (0x1D791, 'M', u'β'), + (0x1D792, 'M', u'γ'), + (0x1D793, 'M', u'δ'), + (0x1D794, 'M', u'ε'), + (0x1D795, 'M', u'ζ'), + (0x1D796, 'M', u'η'), + (0x1D797, 'M', u'θ'), + (0x1D798, 'M', u'ι'), + (0x1D799, 'M', u'κ'), + (0x1D79A, 'M', u'λ'), + (0x1D79B, 'M', u'μ'), + (0x1D79C, 'M', u'ν'), + (0x1D79D, 'M', u'ξ'), + (0x1D79E, 'M', u'ο'), + (0x1D79F, 'M', u'π'), + (0x1D7A0, 'M', u'ρ'), + (0x1D7A1, 'M', u'θ'), + (0x1D7A2, 'M', u'σ'), + (0x1D7A3, 'M', u'τ'), + (0x1D7A4, 'M', u'υ'), + (0x1D7A5, 'M', u'φ'), + (0x1D7A6, 'M', u'χ'), + (0x1D7A7, 'M', u'ψ'), + (0x1D7A8, 'M', u'ω'), + (0x1D7A9, 'M', u'∇'), + (0x1D7AA, 'M', u'α'), + (0x1D7AB, 'M', u'β'), + (0x1D7AC, 'M', u'γ'), + (0x1D7AD, 'M', u'δ'), + (0x1D7AE, 'M', u'ε'), + (0x1D7AF, 'M', u'ζ'), + (0x1D7B0, 'M', u'η'), + (0x1D7B1, 'M', u'θ'), + (0x1D7B2, 'M', u'ι'), + (0x1D7B3, 'M', u'κ'), + (0x1D7B4, 'M', u'λ'), + (0x1D7B5, 'M', u'μ'), + (0x1D7B6, 'M', u'ν'), + (0x1D7B7, 'M', u'ξ'), + (0x1D7B8, 'M', u'ο'), + (0x1D7B9, 'M', u'π'), + (0x1D7BA, 'M', u'ρ'), + (0x1D7BB, 'M', u'σ'), + (0x1D7BD, 'M', u'τ'), + (0x1D7BE, 'M', u'υ'), + (0x1D7BF, 'M', u'φ'), + (0x1D7C0, 'M', u'χ'), + (0x1D7C1, 'M', u'ψ'), + (0x1D7C2, 'M', u'ω'), + (0x1D7C3, 'M', u'∂'), + (0x1D7C4, 'M', u'ε'), + (0x1D7C5, 'M', u'θ'), + (0x1D7C6, 'M', u'κ'), + (0x1D7C7, 'M', u'φ'), + (0x1D7C8, 'M', u'ρ'), + (0x1D7C9, 'M', u'π'), + (0x1D7CA, 'M', u'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', u'0'), + ] + +def _seg_63(): + return [ + (0x1D7CF, 'M', u'1'), + (0x1D7D0, 'M', u'2'), + (0x1D7D1, 'M', u'3'), + (0x1D7D2, 'M', u'4'), + (0x1D7D3, 'M', u'5'), + (0x1D7D4, 'M', u'6'), + (0x1D7D5, 'M', u'7'), + (0x1D7D6, 'M', u'8'), + (0x1D7D7, 'M', u'9'), + (0x1D7D8, 'M', u'0'), + (0x1D7D9, 'M', u'1'), + (0x1D7DA, 'M', u'2'), + (0x1D7DB, 'M', u'3'), + (0x1D7DC, 'M', u'4'), + (0x1D7DD, 'M', u'5'), + (0x1D7DE, 'M', u'6'), + (0x1D7DF, 'M', u'7'), + (0x1D7E0, 'M', u'8'), + (0x1D7E1, 'M', u'9'), + (0x1D7E2, 'M', u'0'), + (0x1D7E3, 'M', u'1'), + (0x1D7E4, 'M', u'2'), + (0x1D7E5, 'M', u'3'), + (0x1D7E6, 'M', u'4'), + (0x1D7E7, 'M', u'5'), + (0x1D7E8, 'M', u'6'), + (0x1D7E9, 'M', u'7'), + (0x1D7EA, 'M', u'8'), + (0x1D7EB, 'M', u'9'), + (0x1D7EC, 'M', u'0'), + (0x1D7ED, 'M', u'1'), + (0x1D7EE, 'M', u'2'), + (0x1D7EF, 'M', u'3'), + (0x1D7F0, 'M', u'4'), + (0x1D7F1, 'M', u'5'), + (0x1D7F2, 'M', u'6'), + (0x1D7F3, 'M', u'7'), + (0x1D7F4, 'M', u'8'), + (0x1D7F5, 'M', u'9'), + (0x1D7F6, 'M', u'0'), + (0x1D7F7, 'M', u'1'), + (0x1D7F8, 'M', u'2'), + (0x1D7F9, 'M', u'3'), + (0x1D7FA, 'M', u'4'), + (0x1D7FB, 'M', u'5'), + (0x1D7FC, 'M', u'6'), + (0x1D7FD, 'M', u'7'), + (0x1D7FE, 'M', u'8'), + (0x1D7FF, 'M', u'9'), + (0x1D800, 'X'), + (0x1EE00, 'M', u'ا'), + (0x1EE01, 'M', u'ب'), + (0x1EE02, 'M', u'ج'), + (0x1EE03, 'M', u'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', u'و'), + (0x1EE06, 'M', u'ز'), + (0x1EE07, 'M', u'ح'), + (0x1EE08, 'M', u'ط'), + (0x1EE09, 'M', u'ي'), + (0x1EE0A, 'M', u'ك'), + (0x1EE0B, 'M', u'ل'), + (0x1EE0C, 'M', u'م'), + (0x1EE0D, 'M', u'ن'), + (0x1EE0E, 'M', u'س'), + (0x1EE0F, 'M', u'ع'), + (0x1EE10, 'M', u'ف'), + (0x1EE11, 'M', u'ص'), + (0x1EE12, 'M', u'ق'), + (0x1EE13, 'M', u'ر'), + (0x1EE14, 'M', u'ش'), + (0x1EE15, 'M', u'ت'), + (0x1EE16, 
'M', u'ث'), + (0x1EE17, 'M', u'خ'), + (0x1EE18, 'M', u'ذ'), + (0x1EE19, 'M', u'ض'), + (0x1EE1A, 'M', u'ظ'), + (0x1EE1B, 'M', u'غ'), + (0x1EE1C, 'M', u'ٮ'), + (0x1EE1D, 'M', u'ں'), + (0x1EE1E, 'M', u'ڡ'), + (0x1EE1F, 'M', u'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', u'ب'), + (0x1EE22, 'M', u'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', u'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', u'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', u'ي'), + (0x1EE2A, 'M', u'ك'), + (0x1EE2B, 'M', u'ل'), + (0x1EE2C, 'M', u'م'), + (0x1EE2D, 'M', u'ن'), + (0x1EE2E, 'M', u'س'), + (0x1EE2F, 'M', u'ع'), + (0x1EE30, 'M', u'ف'), + (0x1EE31, 'M', u'ص'), + (0x1EE32, 'M', u'ق'), + ] + +def _seg_64(): + return [ + (0x1EE33, 'X'), + (0x1EE34, 'M', u'ش'), + (0x1EE35, 'M', u'ت'), + (0x1EE36, 'M', u'ث'), + (0x1EE37, 'M', u'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', u'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', u'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', u'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', u'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', u'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', u'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', u'ن'), + (0x1EE4E, 'M', u'س'), + (0x1EE4F, 'M', u'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', u'ص'), + (0x1EE52, 'M', u'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', u'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', u'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', u'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', u'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', u'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', u'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', u'ب'), + (0x1EE62, 'M', u'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', u'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', u'ح'), + (0x1EE68, 'M', u'ط'), + (0x1EE69, 'M', u'ي'), + (0x1EE6A, 'M', u'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', u'م'), + (0x1EE6D, 'M', u'ن'), + (0x1EE6E, 'M', u'س'), + (0x1EE6F, 'M', u'ع'), + (0x1EE70, 'M', u'ف'), + (0x1EE71, 'M', u'ص'), + (0x1EE72, 'M', u'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', u'ش'), + (0x1EE75, 'M', u'ت'), + (0x1EE76, 'M', u'ث'), + (0x1EE77, 'M', u'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', u'ض'), + (0x1EE7A, 'M', u'ظ'), + (0x1EE7B, 'M', u'غ'), + (0x1EE7C, 'M', u'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', u'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', u'ا'), + (0x1EE81, 'M', u'ب'), + (0x1EE82, 'M', u'ج'), + (0x1EE83, 'M', u'د'), + (0x1EE84, 'M', u'ه'), + (0x1EE85, 'M', u'و'), + (0x1EE86, 'M', u'ز'), + (0x1EE87, 'M', u'ح'), + (0x1EE88, 'M', u'ط'), + (0x1EE89, 'M', u'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', u'ل'), + (0x1EE8C, 'M', u'م'), + (0x1EE8D, 'M', u'ن'), + (0x1EE8E, 'M', u'س'), + (0x1EE8F, 'M', u'ع'), + (0x1EE90, 'M', u'ف'), + (0x1EE91, 'M', u'ص'), + (0x1EE92, 'M', u'ق'), + (0x1EE93, 'M', u'ر'), + (0x1EE94, 'M', u'ش'), + (0x1EE95, 'M', u'ت'), + (0x1EE96, 'M', u'ث'), + (0x1EE97, 'M', u'خ'), + (0x1EE98, 'M', u'ذ'), + (0x1EE99, 'M', u'ض'), + (0x1EE9A, 'M', u'ظ'), + (0x1EE9B, 'M', u'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', u'ب'), + (0x1EEA2, 'M', u'ج'), + (0x1EEA3, 'M', u'د'), + (0x1EEA4, 'X'), + ] + +def _seg_65(): + return [ + (0x1EEA5, 'M', u'و'), + (0x1EEA6, 'M', u'ز'), + (0x1EEA7, 'M', u'ح'), + (0x1EEA8, 'M', u'ط'), + (0x1EEA9, 'M', u'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', u'ل'), + (0x1EEAC, 'M', u'م'), + (0x1EEAD, 'M', u'ن'), + (0x1EEAE, 'M', u'س'), + (0x1EEAF, 'M', u'ع'), + (0x1EEB0, 'M', u'ف'), + (0x1EEB1, 'M', u'ص'), + (0x1EEB2, 'M', u'ق'), + (0x1EEB3, 'M', u'ر'), + (0x1EEB4, 'M', u'ش'), + (0x1EEB5, 'M', u'ت'), + (0x1EEB6, 'M', u'ث'), + (0x1EEB7, 'M', u'خ'), + (0x1EEB8, 'M', u'ذ'), + (0x1EEB9, 'M', u'ض'), + (0x1EEBA, 'M', u'ظ'), + (0x1EEBB, 'M', u'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + 
(0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0BF, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0E0, 'X'), + (0x1F101, '3', u'0,'), + (0x1F102, '3', u'1,'), + (0x1F103, '3', u'2,'), + (0x1F104, '3', u'3,'), + (0x1F105, '3', u'4,'), + (0x1F106, '3', u'5,'), + (0x1F107, '3', u'6,'), + (0x1F108, '3', u'7,'), + (0x1F109, '3', u'8,'), + (0x1F10A, '3', u'9,'), + (0x1F10B, 'X'), + (0x1F110, '3', u'(a)'), + (0x1F111, '3', u'(b)'), + (0x1F112, '3', u'(c)'), + (0x1F113, '3', u'(d)'), + (0x1F114, '3', u'(e)'), + (0x1F115, '3', u'(f)'), + (0x1F116, '3', u'(g)'), + (0x1F117, '3', u'(h)'), + (0x1F118, '3', u'(i)'), + (0x1F119, '3', u'(j)'), + (0x1F11A, '3', u'(k)'), + (0x1F11B, '3', u'(l)'), + (0x1F11C, '3', u'(m)'), + (0x1F11D, '3', u'(n)'), + (0x1F11E, '3', u'(o)'), + (0x1F11F, '3', u'(p)'), + (0x1F120, '3', u'(q)'), + (0x1F121, '3', u'(r)'), + (0x1F122, '3', u'(s)'), + (0x1F123, '3', u'(t)'), + (0x1F124, '3', u'(u)'), + (0x1F125, '3', u'(v)'), + (0x1F126, '3', u'(w)'), + (0x1F127, '3', u'(x)'), + (0x1F128, '3', u'(y)'), + (0x1F129, '3', u'(z)'), + (0x1F12A, 'M', u'〔s〕'), + (0x1F12B, 'M', u'c'), + (0x1F12C, 'M', u'r'), + (0x1F12D, 'M', u'cd'), + (0x1F12E, 'M', u'wz'), + (0x1F12F, 'X'), + (0x1F130, 'M', u'a'), + (0x1F131, 'M', u'b'), + (0x1F132, 'M', u'c'), + (0x1F133, 'M', u'd'), + (0x1F134, 'M', u'e'), + (0x1F135, 'M', u'f'), + (0x1F136, 'M', u'g'), + (0x1F137, 'M', u'h'), + (0x1F138, 'M', u'i'), + (0x1F139, 'M', u'j'), + (0x1F13A, 'M', u'k'), + (0x1F13B, 'M', u'l'), + (0x1F13C, 'M', u'm'), + (0x1F13D, 'M', u'n'), + (0x1F13E, 'M', u'o'), + (0x1F13F, 'M', u'p'), + (0x1F140, 'M', u'q'), + (0x1F141, 'M', u'r'), + (0x1F142, 'M', u's'), + ] + +def _seg_66(): + return [ + (0x1F143, 'M', u't'), + (0x1F144, 'M', u'u'), + (0x1F145, 'M', u'v'), + (0x1F146, 'M', u'w'), + (0x1F147, 'M', u'x'), + (0x1F148, 'M', u'y'), + (0x1F149, 'M', u'z'), + (0x1F14A, 'M', u'hv'), + (0x1F14B, 'M', u'mv'), + (0x1F14C, 'M', u'sd'), + (0x1F14D, 'M', u'ss'), + (0x1F14E, 'M', u'ppv'), + (0x1F14F, 'M', u'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', u'mc'), + (0x1F16B, 'M', u'md'), + (0x1F16C, 'X'), + (0x1F170, 'V'), + (0x1F190, 'M', u'dj'), + (0x1F191, 'V'), + (0x1F19B, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', u'ほか'), + (0x1F201, 'M', u'ココ'), + (0x1F202, 'M', u'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', u'手'), + (0x1F211, 'M', u'字'), + (0x1F212, 'M', u'双'), + (0x1F213, 'M', u'デ'), + (0x1F214, 'M', u'二'), + (0x1F215, 'M', u'多'), + (0x1F216, 'M', u'解'), + (0x1F217, 'M', u'天'), + (0x1F218, 'M', u'交'), + (0x1F219, 'M', u'映'), + (0x1F21A, 'M', u'無'), + (0x1F21B, 'M', u'料'), + (0x1F21C, 'M', u'前'), + (0x1F21D, 'M', u'後'), + (0x1F21E, 'M', u'再'), + (0x1F21F, 'M', u'新'), + (0x1F220, 'M', u'初'), + (0x1F221, 'M', u'終'), + (0x1F222, 'M', u'生'), + (0x1F223, 'M', u'販'), + (0x1F224, 'M', u'声'), + (0x1F225, 'M', u'吹'), + (0x1F226, 'M', u'演'), + (0x1F227, 'M', u'投'), + (0x1F228, 'M', u'捕'), + (0x1F229, 'M', u'一'), + (0x1F22A, 'M', u'三'), + (0x1F22B, 'M', u'遊'), + (0x1F22C, 'M', u'左'), + (0x1F22D, 'M', u'中'), + (0x1F22E, 'M', u'右'), + (0x1F22F, 'M', u'指'), + (0x1F230, 'M', u'走'), + (0x1F231, 'M', u'打'), + (0x1F232, 'M', u'禁'), + (0x1F233, 'M', u'空'), + (0x1F234, 'M', u'合'), + (0x1F235, 'M', u'満'), + (0x1F236, 'M', u'有'), + (0x1F237, 'M', u'月'), + (0x1F238, 'M', u'申'), + (0x1F239, 'M', u'割'), + (0x1F23A, 'M', u'営'), + (0x1F23B, 'X'), + (0x1F240, 'M', u'〔本〕'), + (0x1F241, 'M', u'〔三〕'), + (0x1F242, 'M', u'〔二〕'), + (0x1F243, 'M', u'〔安〕'), + (0x1F244, 'M', 
u'〔点〕'), + (0x1F245, 'M', u'〔打〕'), + (0x1F246, 'M', u'〔盗〕'), + (0x1F247, 'M', u'〔勝〕'), + (0x1F248, 'M', u'〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', u'得'), + (0x1F251, 'M', u'可'), + (0x1F252, 'X'), + (0x1F300, 'V'), + (0x1F321, 'X'), + (0x1F330, 'V'), + (0x1F336, 'X'), + (0x1F337, 'V'), + (0x1F37D, 'X'), + (0x1F380, 'V'), + (0x1F394, 'X'), + (0x1F3A0, 'V'), + (0x1F3C5, 'X'), + (0x1F3C6, 'V'), + (0x1F3CB, 'X'), + (0x1F3E0, 'V'), + (0x1F3F1, 'X'), + (0x1F400, 'V'), + (0x1F43F, 'X'), + (0x1F440, 'V'), + ] + +def _seg_67(): + return [ + (0x1F441, 'X'), + (0x1F442, 'V'), + (0x1F4F8, 'X'), + (0x1F4F9, 'V'), + (0x1F4FD, 'X'), + (0x1F500, 'V'), + (0x1F53E, 'X'), + (0x1F540, 'V'), + (0x1F544, 'X'), + (0x1F550, 'V'), + (0x1F568, 'X'), + (0x1F5FB, 'V'), + (0x1F641, 'X'), + (0x1F645, 'V'), + (0x1F650, 'X'), + (0x1F680, 'V'), + (0x1F6C6, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x20000, 'V'), + (0x2A6D7, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2F800, 'M', u'丽'), + (0x2F801, 'M', u'丸'), + (0x2F802, 'M', u'乁'), + (0x2F803, 'M', u'𠄢'), + (0x2F804, 'M', u'你'), + (0x2F805, 'M', u'侮'), + (0x2F806, 'M', u'侻'), + (0x2F807, 'M', u'倂'), + (0x2F808, 'M', u'偺'), + (0x2F809, 'M', u'備'), + (0x2F80A, 'M', u'僧'), + (0x2F80B, 'M', u'像'), + (0x2F80C, 'M', u'㒞'), + (0x2F80D, 'M', u'𠘺'), + (0x2F80E, 'M', u'免'), + (0x2F80F, 'M', u'兔'), + (0x2F810, 'M', u'兤'), + (0x2F811, 'M', u'具'), + (0x2F812, 'M', u'𠔜'), + (0x2F813, 'M', u'㒹'), + (0x2F814, 'M', u'內'), + (0x2F815, 'M', u'再'), + (0x2F816, 'M', u'𠕋'), + (0x2F817, 'M', u'冗'), + (0x2F818, 'M', u'冤'), + (0x2F819, 'M', u'仌'), + (0x2F81A, 'M', u'冬'), + (0x2F81B, 'M', u'况'), + (0x2F81C, 'M', u'𩇟'), + (0x2F81D, 'M', u'凵'), + (0x2F81E, 'M', u'刃'), + (0x2F81F, 'M', u'㓟'), + (0x2F820, 'M', u'刻'), + (0x2F821, 'M', u'剆'), + (0x2F822, 'M', u'割'), + (0x2F823, 'M', u'剷'), + (0x2F824, 'M', u'㔕'), + (0x2F825, 'M', u'勇'), + (0x2F826, 'M', u'勉'), + (0x2F827, 'M', u'勤'), + (0x2F828, 'M', u'勺'), + (0x2F829, 'M', u'包'), + (0x2F82A, 'M', u'匆'), + (0x2F82B, 'M', u'北'), + (0x2F82C, 'M', u'卉'), + (0x2F82D, 'M', u'卑'), + (0x2F82E, 'M', u'博'), + (0x2F82F, 'M', u'即'), + (0x2F830, 'M', u'卽'), + (0x2F831, 'M', u'卿'), + (0x2F834, 'M', u'𠨬'), + (0x2F835, 'M', u'灰'), + (0x2F836, 'M', u'及'), + (0x2F837, 'M', u'叟'), + (0x2F838, 'M', u'𠭣'), + (0x2F839, 'M', u'叫'), + (0x2F83A, 'M', u'叱'), + (0x2F83B, 'M', u'吆'), + (0x2F83C, 'M', u'咞'), + (0x2F83D, 'M', u'吸'), + (0x2F83E, 'M', u'呈'), + (0x2F83F, 'M', u'周'), + (0x2F840, 'M', u'咢'), + (0x2F841, 'M', u'哶'), + (0x2F842, 'M', u'唐'), + (0x2F843, 'M', u'啓'), + (0x2F844, 'M', u'啣'), + (0x2F845, 'M', u'善'), + (0x2F847, 'M', u'喙'), + (0x2F848, 'M', u'喫'), + (0x2F849, 'M', u'喳'), + (0x2F84A, 'M', u'嗂'), + (0x2F84B, 'M', u'圖'), + (0x2F84C, 'M', u'嘆'), + (0x2F84D, 'M', u'圗'), + ] + +def _seg_68(): + return [ + (0x2F84E, 'M', u'噑'), + (0x2F84F, 'M', u'噴'), + (0x2F850, 'M', u'切'), + (0x2F851, 'M', u'壮'), + (0x2F852, 'M', u'城'), + (0x2F853, 'M', u'埴'), + (0x2F854, 'M', u'堍'), + (0x2F855, 'M', u'型'), + (0x2F856, 'M', u'堲'), + (0x2F857, 'M', u'報'), + (0x2F858, 'M', u'墬'), + (0x2F859, 'M', u'𡓤'), + (0x2F85A, 'M', u'売'), + (0x2F85B, 'M', u'壷'), + (0x2F85C, 'M', u'夆'), + (0x2F85D, 'M', u'多'), + (0x2F85E, 'M', u'夢'), + (0x2F85F, 'M', u'奢'), + (0x2F860, 'M', u'𡚨'), + (0x2F861, 'M', u'𡛪'), + (0x2F862, 'M', u'姬'), + (0x2F863, 'M', u'娛'), + (0x2F864, 'M', u'娧'), + (0x2F865, 'M', u'姘'), + (0x2F866, 'M', u'婦'), + (0x2F867, 'M', u'㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', u'嬈'), + (0x2F86A, 'M', u'嬾'), + (0x2F86C, 'M', u'𡧈'), + (0x2F86D, 'M', 
u'寃'), + (0x2F86E, 'M', u'寘'), + (0x2F86F, 'M', u'寧'), + (0x2F870, 'M', u'寳'), + (0x2F871, 'M', u'𡬘'), + (0x2F872, 'M', u'寿'), + (0x2F873, 'M', u'将'), + (0x2F874, 'X'), + (0x2F875, 'M', u'尢'), + (0x2F876, 'M', u'㞁'), + (0x2F877, 'M', u'屠'), + (0x2F878, 'M', u'屮'), + (0x2F879, 'M', u'峀'), + (0x2F87A, 'M', u'岍'), + (0x2F87B, 'M', u'𡷤'), + (0x2F87C, 'M', u'嵃'), + (0x2F87D, 'M', u'𡷦'), + (0x2F87E, 'M', u'嵮'), + (0x2F87F, 'M', u'嵫'), + (0x2F880, 'M', u'嵼'), + (0x2F881, 'M', u'巡'), + (0x2F882, 'M', u'巢'), + (0x2F883, 'M', u'㠯'), + (0x2F884, 'M', u'巽'), + (0x2F885, 'M', u'帨'), + (0x2F886, 'M', u'帽'), + (0x2F887, 'M', u'幩'), + (0x2F888, 'M', u'㡢'), + (0x2F889, 'M', u'𢆃'), + (0x2F88A, 'M', u'㡼'), + (0x2F88B, 'M', u'庰'), + (0x2F88C, 'M', u'庳'), + (0x2F88D, 'M', u'庶'), + (0x2F88E, 'M', u'廊'), + (0x2F88F, 'M', u'𪎒'), + (0x2F890, 'M', u'廾'), + (0x2F891, 'M', u'𢌱'), + (0x2F893, 'M', u'舁'), + (0x2F894, 'M', u'弢'), + (0x2F896, 'M', u'㣇'), + (0x2F897, 'M', u'𣊸'), + (0x2F898, 'M', u'𦇚'), + (0x2F899, 'M', u'形'), + (0x2F89A, 'M', u'彫'), + (0x2F89B, 'M', u'㣣'), + (0x2F89C, 'M', u'徚'), + (0x2F89D, 'M', u'忍'), + (0x2F89E, 'M', u'志'), + (0x2F89F, 'M', u'忹'), + (0x2F8A0, 'M', u'悁'), + (0x2F8A1, 'M', u'㤺'), + (0x2F8A2, 'M', u'㤜'), + (0x2F8A3, 'M', u'悔'), + (0x2F8A4, 'M', u'𢛔'), + (0x2F8A5, 'M', u'惇'), + (0x2F8A6, 'M', u'慈'), + (0x2F8A7, 'M', u'慌'), + (0x2F8A8, 'M', u'慎'), + (0x2F8A9, 'M', u'慌'), + (0x2F8AA, 'M', u'慺'), + (0x2F8AB, 'M', u'憎'), + (0x2F8AC, 'M', u'憲'), + (0x2F8AD, 'M', u'憤'), + (0x2F8AE, 'M', u'憯'), + (0x2F8AF, 'M', u'懞'), + (0x2F8B0, 'M', u'懲'), + (0x2F8B1, 'M', u'懶'), + (0x2F8B2, 'M', u'成'), + (0x2F8B3, 'M', u'戛'), + (0x2F8B4, 'M', u'扝'), + ] + +def _seg_69(): + return [ + (0x2F8B5, 'M', u'抱'), + (0x2F8B6, 'M', u'拔'), + (0x2F8B7, 'M', u'捐'), + (0x2F8B8, 'M', u'𢬌'), + (0x2F8B9, 'M', u'挽'), + (0x2F8BA, 'M', u'拼'), + (0x2F8BB, 'M', u'捨'), + (0x2F8BC, 'M', u'掃'), + (0x2F8BD, 'M', u'揤'), + (0x2F8BE, 'M', u'𢯱'), + (0x2F8BF, 'M', u'搢'), + (0x2F8C0, 'M', u'揅'), + (0x2F8C1, 'M', u'掩'), + (0x2F8C2, 'M', u'㨮'), + (0x2F8C3, 'M', u'摩'), + (0x2F8C4, 'M', u'摾'), + (0x2F8C5, 'M', u'撝'), + (0x2F8C6, 'M', u'摷'), + (0x2F8C7, 'M', u'㩬'), + (0x2F8C8, 'M', u'敏'), + (0x2F8C9, 'M', u'敬'), + (0x2F8CA, 'M', u'𣀊'), + (0x2F8CB, 'M', u'旣'), + (0x2F8CC, 'M', u'書'), + (0x2F8CD, 'M', u'晉'), + (0x2F8CE, 'M', u'㬙'), + (0x2F8CF, 'M', u'暑'), + (0x2F8D0, 'M', u'㬈'), + (0x2F8D1, 'M', u'㫤'), + (0x2F8D2, 'M', u'冒'), + (0x2F8D3, 'M', u'冕'), + (0x2F8D4, 'M', u'最'), + (0x2F8D5, 'M', u'暜'), + (0x2F8D6, 'M', u'肭'), + (0x2F8D7, 'M', u'䏙'), + (0x2F8D8, 'M', u'朗'), + (0x2F8D9, 'M', u'望'), + (0x2F8DA, 'M', u'朡'), + (0x2F8DB, 'M', u'杞'), + (0x2F8DC, 'M', u'杓'), + (0x2F8DD, 'M', u'𣏃'), + (0x2F8DE, 'M', u'㭉'), + (0x2F8DF, 'M', u'柺'), + (0x2F8E0, 'M', u'枅'), + (0x2F8E1, 'M', u'桒'), + (0x2F8E2, 'M', u'梅'), + (0x2F8E3, 'M', u'𣑭'), + (0x2F8E4, 'M', u'梎'), + (0x2F8E5, 'M', u'栟'), + (0x2F8E6, 'M', u'椔'), + (0x2F8E7, 'M', u'㮝'), + (0x2F8E8, 'M', u'楂'), + (0x2F8E9, 'M', u'榣'), + (0x2F8EA, 'M', u'槪'), + (0x2F8EB, 'M', u'檨'), + (0x2F8EC, 'M', u'𣚣'), + (0x2F8ED, 'M', u'櫛'), + (0x2F8EE, 'M', u'㰘'), + (0x2F8EF, 'M', u'次'), + (0x2F8F0, 'M', u'𣢧'), + (0x2F8F1, 'M', u'歔'), + (0x2F8F2, 'M', u'㱎'), + (0x2F8F3, 'M', u'歲'), + (0x2F8F4, 'M', u'殟'), + (0x2F8F5, 'M', u'殺'), + (0x2F8F6, 'M', u'殻'), + (0x2F8F7, 'M', u'𣪍'), + (0x2F8F8, 'M', u'𡴋'), + (0x2F8F9, 'M', u'𣫺'), + (0x2F8FA, 'M', u'汎'), + (0x2F8FB, 'M', u'𣲼'), + (0x2F8FC, 'M', u'沿'), + (0x2F8FD, 'M', u'泍'), + (0x2F8FE, 'M', u'汧'), + (0x2F8FF, 'M', u'洖'), + (0x2F900, 'M', u'派'), + (0x2F901, 'M', u'海'), + (0x2F902, 'M', 
u'流'), + (0x2F903, 'M', u'浩'), + (0x2F904, 'M', u'浸'), + (0x2F905, 'M', u'涅'), + (0x2F906, 'M', u'𣴞'), + (0x2F907, 'M', u'洴'), + (0x2F908, 'M', u'港'), + (0x2F909, 'M', u'湮'), + (0x2F90A, 'M', u'㴳'), + (0x2F90B, 'M', u'滋'), + (0x2F90C, 'M', u'滇'), + (0x2F90D, 'M', u'𣻑'), + (0x2F90E, 'M', u'淹'), + (0x2F90F, 'M', u'潮'), + (0x2F910, 'M', u'𣽞'), + (0x2F911, 'M', u'𣾎'), + (0x2F912, 'M', u'濆'), + (0x2F913, 'M', u'瀹'), + (0x2F914, 'M', u'瀞'), + (0x2F915, 'M', u'瀛'), + (0x2F916, 'M', u'㶖'), + (0x2F917, 'M', u'灊'), + (0x2F918, 'M', u'災'), + ] + +def _seg_70(): + return [ + (0x2F919, 'M', u'灷'), + (0x2F91A, 'M', u'炭'), + (0x2F91B, 'M', u'𠔥'), + (0x2F91C, 'M', u'煅'), + (0x2F91D, 'M', u'𤉣'), + (0x2F91E, 'M', u'熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', u'爨'), + (0x2F921, 'M', u'爵'), + (0x2F922, 'M', u'牐'), + (0x2F923, 'M', u'𤘈'), + (0x2F924, 'M', u'犀'), + (0x2F925, 'M', u'犕'), + (0x2F926, 'M', u'𤜵'), + (0x2F927, 'M', u'𤠔'), + (0x2F928, 'M', u'獺'), + (0x2F929, 'M', u'王'), + (0x2F92A, 'M', u'㺬'), + (0x2F92B, 'M', u'玥'), + (0x2F92C, 'M', u'㺸'), + (0x2F92E, 'M', u'瑇'), + (0x2F92F, 'M', u'瑜'), + (0x2F930, 'M', u'瑱'), + (0x2F931, 'M', u'璅'), + (0x2F932, 'M', u'瓊'), + (0x2F933, 'M', u'㼛'), + (0x2F934, 'M', u'甤'), + (0x2F935, 'M', u'𤰶'), + (0x2F936, 'M', u'甾'), + (0x2F937, 'M', u'𤲒'), + (0x2F938, 'M', u'異'), + (0x2F939, 'M', u'𢆟'), + (0x2F93A, 'M', u'瘐'), + (0x2F93B, 'M', u'𤾡'), + (0x2F93C, 'M', u'𤾸'), + (0x2F93D, 'M', u'𥁄'), + (0x2F93E, 'M', u'㿼'), + (0x2F93F, 'M', u'䀈'), + (0x2F940, 'M', u'直'), + (0x2F941, 'M', u'𥃳'), + (0x2F942, 'M', u'𥃲'), + (0x2F943, 'M', u'𥄙'), + (0x2F944, 'M', u'𥄳'), + (0x2F945, 'M', u'眞'), + (0x2F946, 'M', u'真'), + (0x2F948, 'M', u'睊'), + (0x2F949, 'M', u'䀹'), + (0x2F94A, 'M', u'瞋'), + (0x2F94B, 'M', u'䁆'), + (0x2F94C, 'M', u'䂖'), + (0x2F94D, 'M', u'𥐝'), + (0x2F94E, 'M', u'硎'), + (0x2F94F, 'M', u'碌'), + (0x2F950, 'M', u'磌'), + (0x2F951, 'M', u'䃣'), + (0x2F952, 'M', u'𥘦'), + (0x2F953, 'M', u'祖'), + (0x2F954, 'M', u'𥚚'), + (0x2F955, 'M', u'𥛅'), + (0x2F956, 'M', u'福'), + (0x2F957, 'M', u'秫'), + (0x2F958, 'M', u'䄯'), + (0x2F959, 'M', u'穀'), + (0x2F95A, 'M', u'穊'), + (0x2F95B, 'M', u'穏'), + (0x2F95C, 'M', u'𥥼'), + (0x2F95D, 'M', u'𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', u'䈂'), + (0x2F961, 'M', u'𥮫'), + (0x2F962, 'M', u'篆'), + (0x2F963, 'M', u'築'), + (0x2F964, 'M', u'䈧'), + (0x2F965, 'M', u'𥲀'), + (0x2F966, 'M', u'糒'), + (0x2F967, 'M', u'䊠'), + (0x2F968, 'M', u'糨'), + (0x2F969, 'M', u'糣'), + (0x2F96A, 'M', u'紀'), + (0x2F96B, 'M', u'𥾆'), + (0x2F96C, 'M', u'絣'), + (0x2F96D, 'M', u'䌁'), + (0x2F96E, 'M', u'緇'), + (0x2F96F, 'M', u'縂'), + (0x2F970, 'M', u'繅'), + (0x2F971, 'M', u'䌴'), + (0x2F972, 'M', u'𦈨'), + (0x2F973, 'M', u'𦉇'), + (0x2F974, 'M', u'䍙'), + (0x2F975, 'M', u'𦋙'), + (0x2F976, 'M', u'罺'), + (0x2F977, 'M', u'𦌾'), + (0x2F978, 'M', u'羕'), + (0x2F979, 'M', u'翺'), + (0x2F97A, 'M', u'者'), + (0x2F97B, 'M', u'𦓚'), + (0x2F97C, 'M', u'𦔣'), + (0x2F97D, 'M', u'聠'), + (0x2F97E, 'M', u'𦖨'), + (0x2F97F, 'M', u'聰'), + ] + +def _seg_71(): + return [ + (0x2F980, 'M', u'𣍟'), + (0x2F981, 'M', u'䏕'), + (0x2F982, 'M', u'育'), + (0x2F983, 'M', u'脃'), + (0x2F984, 'M', u'䐋'), + (0x2F985, 'M', u'脾'), + (0x2F986, 'M', u'媵'), + (0x2F987, 'M', u'𦞧'), + (0x2F988, 'M', u'𦞵'), + (0x2F989, 'M', u'𣎓'), + (0x2F98A, 'M', u'𣎜'), + (0x2F98B, 'M', u'舁'), + (0x2F98C, 'M', u'舄'), + (0x2F98D, 'M', u'辞'), + (0x2F98E, 'M', u'䑫'), + (0x2F98F, 'M', u'芑'), + (0x2F990, 'M', u'芋'), + (0x2F991, 'M', u'芝'), + (0x2F992, 'M', u'劳'), + (0x2F993, 'M', u'花'), + (0x2F994, 'M', u'芳'), + (0x2F995, 'M', u'芽'), + (0x2F996, 'M', u'苦'), + (0x2F997, 
'M', u'𦬼'), + (0x2F998, 'M', u'若'), + (0x2F999, 'M', u'茝'), + (0x2F99A, 'M', u'荣'), + (0x2F99B, 'M', u'莭'), + (0x2F99C, 'M', u'茣'), + (0x2F99D, 'M', u'莽'), + (0x2F99E, 'M', u'菧'), + (0x2F99F, 'M', u'著'), + (0x2F9A0, 'M', u'荓'), + (0x2F9A1, 'M', u'菊'), + (0x2F9A2, 'M', u'菌'), + (0x2F9A3, 'M', u'菜'), + (0x2F9A4, 'M', u'𦰶'), + (0x2F9A5, 'M', u'𦵫'), + (0x2F9A6, 'M', u'𦳕'), + (0x2F9A7, 'M', u'䔫'), + (0x2F9A8, 'M', u'蓱'), + (0x2F9A9, 'M', u'蓳'), + (0x2F9AA, 'M', u'蔖'), + (0x2F9AB, 'M', u'𧏊'), + (0x2F9AC, 'M', u'蕤'), + (0x2F9AD, 'M', u'𦼬'), + (0x2F9AE, 'M', u'䕝'), + (0x2F9AF, 'M', u'䕡'), + (0x2F9B0, 'M', u'𦾱'), + (0x2F9B1, 'M', u'𧃒'), + (0x2F9B2, 'M', u'䕫'), + (0x2F9B3, 'M', u'虐'), + (0x2F9B4, 'M', u'虜'), + (0x2F9B5, 'M', u'虧'), + (0x2F9B6, 'M', u'虩'), + (0x2F9B7, 'M', u'蚩'), + (0x2F9B8, 'M', u'蚈'), + (0x2F9B9, 'M', u'蜎'), + (0x2F9BA, 'M', u'蛢'), + (0x2F9BB, 'M', u'蝹'), + (0x2F9BC, 'M', u'蜨'), + (0x2F9BD, 'M', u'蝫'), + (0x2F9BE, 'M', u'螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', u'蟡'), + (0x2F9C1, 'M', u'蠁'), + (0x2F9C2, 'M', u'䗹'), + (0x2F9C3, 'M', u'衠'), + (0x2F9C4, 'M', u'衣'), + (0x2F9C5, 'M', u'𧙧'), + (0x2F9C6, 'M', u'裗'), + (0x2F9C7, 'M', u'裞'), + (0x2F9C8, 'M', u'䘵'), + (0x2F9C9, 'M', u'裺'), + (0x2F9CA, 'M', u'㒻'), + (0x2F9CB, 'M', u'𧢮'), + (0x2F9CC, 'M', u'𧥦'), + (0x2F9CD, 'M', u'䚾'), + (0x2F9CE, 'M', u'䛇'), + (0x2F9CF, 'M', u'誠'), + (0x2F9D0, 'M', u'諭'), + (0x2F9D1, 'M', u'變'), + (0x2F9D2, 'M', u'豕'), + (0x2F9D3, 'M', u'𧲨'), + (0x2F9D4, 'M', u'貫'), + (0x2F9D5, 'M', u'賁'), + (0x2F9D6, 'M', u'贛'), + (0x2F9D7, 'M', u'起'), + (0x2F9D8, 'M', u'𧼯'), + (0x2F9D9, 'M', u'𠠄'), + (0x2F9DA, 'M', u'跋'), + (0x2F9DB, 'M', u'趼'), + (0x2F9DC, 'M', u'跰'), + (0x2F9DD, 'M', u'𠣞'), + (0x2F9DE, 'M', u'軔'), + (0x2F9DF, 'M', u'輸'), + (0x2F9E0, 'M', u'𨗒'), + (0x2F9E1, 'M', u'𨗭'), + (0x2F9E2, 'M', u'邔'), + (0x2F9E3, 'M', u'郱'), + ] + +def _seg_72(): + return [ + (0x2F9E4, 'M', u'鄑'), + (0x2F9E5, 'M', u'𨜮'), + (0x2F9E6, 'M', u'鄛'), + (0x2F9E7, 'M', u'鈸'), + (0x2F9E8, 'M', u'鋗'), + (0x2F9E9, 'M', u'鋘'), + (0x2F9EA, 'M', u'鉼'), + (0x2F9EB, 'M', u'鏹'), + (0x2F9EC, 'M', u'鐕'), + (0x2F9ED, 'M', u'𨯺'), + (0x2F9EE, 'M', u'開'), + (0x2F9EF, 'M', u'䦕'), + (0x2F9F0, 'M', u'閷'), + (0x2F9F1, 'M', u'𨵷'), + (0x2F9F2, 'M', u'䧦'), + (0x2F9F3, 'M', u'雃'), + (0x2F9F4, 'M', u'嶲'), + (0x2F9F5, 'M', u'霣'), + (0x2F9F6, 'M', u'𩅅'), + (0x2F9F7, 'M', u'𩈚'), + (0x2F9F8, 'M', u'䩮'), + (0x2F9F9, 'M', u'䩶'), + (0x2F9FA, 'M', u'韠'), + (0x2F9FB, 'M', u'𩐊'), + (0x2F9FC, 'M', u'䪲'), + (0x2F9FD, 'M', u'𩒖'), + (0x2F9FE, 'M', u'頋'), + (0x2FA00, 'M', u'頩'), + (0x2FA01, 'M', u'𩖶'), + (0x2FA02, 'M', u'飢'), + (0x2FA03, 'M', u'䬳'), + (0x2FA04, 'M', u'餩'), + (0x2FA05, 'M', u'馧'), + (0x2FA06, 'M', u'駂'), + (0x2FA07, 'M', u'駾'), + (0x2FA08, 'M', u'䯎'), + (0x2FA09, 'M', u'𩬰'), + (0x2FA0A, 'M', u'鬒'), + (0x2FA0B, 'M', u'鱀'), + (0x2FA0C, 'M', u'鳽'), + (0x2FA0D, 'M', u'䳎'), + (0x2FA0E, 'M', u'䳭'), + (0x2FA0F, 'M', u'鵧'), + (0x2FA10, 'M', u'𪃎'), + (0x2FA11, 'M', u'䳸'), + (0x2FA12, 'M', u'𪄅'), + (0x2FA13, 'M', u'𪈎'), + (0x2FA14, 'M', u'𪊑'), + (0x2FA15, 'M', u'麻'), + (0x2FA16, 'M', u'䵖'), + (0x2FA17, 'M', u'黹'), + (0x2FA18, 'M', u'黾'), + (0x2FA19, 'M', u'鼅'), + (0x2FA1A, 'M', u'鼏'), + (0x2FA1B, 'M', u'鼖'), + (0x2FA1C, 'M', u'鼻'), + (0x2FA1D, 'M', u'𪘀'), + (0x2FA1E, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + 
_seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() +) diff --git a/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/PKG-INFO b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..b49c2a5 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/PKG-INFO @@ -0,0 +1,19 @@ +Metadata-Version: 1.1 +Name: imgurpython +Version: 1.1.7 +Summary: Official Imgur python library with OAuth2 and samples +Home-page: https://github.com/Imgur/imgurpython +Author: Imgur Inc. +Author-email: api@imgur.com +License: MIT +Description: UNKNOWN +Keywords: api,imgur,client +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 diff --git a/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/SOURCES.txt b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..a3d71cf --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,27 @@ +setup.cfg +setup.py +imgurpython/__init__.py +imgurpython/client.py +imgurpython.egg-info/PKG-INFO +imgurpython.egg-info/SOURCES.txt +imgurpython.egg-info/dependency_links.txt +imgurpython.egg-info/requires.txt +imgurpython.egg-info/top_level.txt +imgurpython/helpers/__init__.py +imgurpython/helpers/error.py +imgurpython/helpers/format.py +imgurpython/imgur/__init__.py +imgurpython/imgur/models/__init__.py +imgurpython/imgur/models/account.py +imgurpython/imgur/models/account_settings.py +imgurpython/imgur/models/album.py +imgurpython/imgur/models/comment.py +imgurpython/imgur/models/conversation.py +imgurpython/imgur/models/custom_gallery.py +imgurpython/imgur/models/gallery_album.py +imgurpython/imgur/models/gallery_image.py +imgurpython/imgur/models/image.py +imgurpython/imgur/models/message.py +imgurpython/imgur/models/notification.py +imgurpython/imgur/models/tag.py +imgurpython/imgur/models/tag_vote.py \ No newline at end of file diff --git a/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/dependency_links.txt b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/installed-files.txt b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/installed-files.txt new file mode 100644 index 
0000000..013d3b7 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/installed-files.txt @@ -0,0 +1,45 @@ +..\imgurpython\client.py +..\imgurpython\__init__.py +..\imgurpython\helpers\error.py +..\imgurpython\helpers\format.py +..\imgurpython\helpers\__init__.py +..\imgurpython\imgur\__init__.py +..\imgurpython\imgur\models\account.py +..\imgurpython\imgur\models\account_settings.py +..\imgurpython\imgur\models\album.py +..\imgurpython\imgur\models\comment.py +..\imgurpython\imgur\models\conversation.py +..\imgurpython\imgur\models\custom_gallery.py +..\imgurpython\imgur\models\gallery_album.py +..\imgurpython\imgur\models\gallery_image.py +..\imgurpython\imgur\models\image.py +..\imgurpython\imgur\models\message.py +..\imgurpython\imgur\models\notification.py +..\imgurpython\imgur\models\tag.py +..\imgurpython\imgur\models\tag_vote.py +..\imgurpython\imgur\models\__init__.py +..\imgurpython\__pycache__\client.cpython-36.pyc +..\imgurpython\__pycache__\__init__.cpython-36.pyc +..\imgurpython\helpers\__pycache__\error.cpython-36.pyc +..\imgurpython\helpers\__pycache__\format.cpython-36.pyc +..\imgurpython\helpers\__pycache__\__init__.cpython-36.pyc +..\imgurpython\imgur\__pycache__\__init__.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\account.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\account_settings.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\album.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\comment.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\conversation.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\custom_gallery.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\gallery_album.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\gallery_image.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\image.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\message.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\notification.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\tag.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\tag_vote.cpython-36.pyc +..\imgurpython\imgur\models\__pycache__\__init__.cpython-36.pyc +dependency_links.txt +PKG-INFO +requires.txt +SOURCES.txt +top_level.txt diff --git a/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/requires.txt b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..f229360 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/requires.txt @@ -0,0 +1 @@ +requests diff --git a/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/top_level.txt b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..294f01e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython-1.1.7-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +imgurpython diff --git a/RBXLegacyDiscordBot/lib/imgurpython/__init__.py b/RBXLegacyDiscordBot/lib/imgurpython/__init__.py new file mode 100644 index 0000000..651dc85 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/__init__.py @@ -0,0 +1 @@ +from .client import ImgurClient \ No newline at end of file diff --git a/RBXLegacyDiscordBot/lib/imgurpython/client.py b/RBXLegacyDiscordBot/lib/imgurpython/client.py new file mode 100644 index 0000000..b860a1a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/client.py @@ -0,0 +1,681 @@ +import base64 +import requests +from .imgur.models.tag import Tag +from .imgur.models.album import Album +from .imgur.models.image import 
Image +from .imgur.models.account import Account +from .imgur.models.comment import Comment +from .imgur.models.tag_vote import TagVote +from .helpers.error import ImgurClientError +from .helpers.format import build_notification +from .helpers.format import format_comment_tree +from .helpers.format import build_notifications +from .imgur.models.conversation import Conversation +from .helpers.error import ImgurClientRateLimitError +from .helpers.format import build_gallery_images_and_albums +from .imgur.models.custom_gallery import CustomGallery +from .imgur.models.account_settings import AccountSettings + +API_URL = 'https://api.imgur.com/' +MASHAPE_URL = 'https://imgur-apiv3.p.mashape.com/' + + +class AuthWrapper(object): + def __init__(self, access_token, refresh_token, client_id, client_secret): + self.current_access_token = access_token + + if refresh_token is None: + raise TypeError('A refresh token must be provided') + + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + + def get_refresh_token(self): + return self.refresh_token + + def get_current_access_token(self): + return self.current_access_token + + def refresh(self): + data = { + 'refresh_token': self.refresh_token, + 'client_id': self.client_id, + 'client_secret': self.client_secret, + 'grant_type': 'refresh_token' + } + + url = API_URL + 'oauth2/token' + + response = requests.post(url, data=data) + + if response.status_code != 200: + raise ImgurClientError('Error refreshing access token!', response.status_code) + + response_data = response.json() + self.current_access_token = response_data['access_token'] + + +class ImgurClient(object): + allowed_album_fields = { + 'ids', 'title', 'description', 'privacy', 'layout', 'cover' + } + + allowed_advanced_search_fields = { + 'q_all', 'q_any', 'q_exactly', 'q_not', 'q_type', 'q_size_px' + } + + allowed_account_fields = { + 'bio', 'public_images', 'messaging_enabled', 'album_privacy', 'accepted_gallery_terms', 'username' + } + + allowed_image_fields = { + 'album', 'name', 'title', 'description' + } + + def __init__(self, client_id, client_secret, access_token=None, refresh_token=None, mashape_key=None): + self.client_id = client_id + self.client_secret = client_secret + self.auth = None + self.mashape_key = mashape_key + + if refresh_token is not None: + self.auth = AuthWrapper(access_token, refresh_token, client_id, client_secret) + + self.credits = self.get_credits() + + def set_user_auth(self, access_token, refresh_token): + self.auth = AuthWrapper(access_token, refresh_token, self.client_id, self.client_secret) + + def get_client_id(self): + return self.client_id + + def get_credits(self): + return self.make_request('GET', 'credits', None, True) + + def get_auth_url(self, response_type='pin'): + return '%soauth2/authorize?client_id=%s&response_type=%s' % (API_URL, self.client_id, response_type) + + def authorize(self, response, grant_type='pin'): + return self.make_request('POST', 'oauth2/token', { + 'client_id': self.client_id, + 'client_secret': self.client_secret, + 'grant_type': grant_type, + 'code' if grant_type == 'authorization_code' else grant_type: response + }, True) + + def prepare_headers(self, force_anon=False): + headers = {} + if force_anon or self.auth is None: + if self.client_id is None: + raise ImgurClientError('Client credentials not found!') + else: + headers['Authorization'] = 'Client-ID %s' % self.get_client_id() + else: + headers['Authorization'] = 'Bearer %s' % self.auth.get_current_access_token() + + 
if self.mashape_key is not None: + headers['X-Mashape-Key'] = self.mashape_key + + return headers + + + def make_request(self, method, route, data=None, force_anon=False): + method = method.lower() + method_to_call = getattr(requests, method) + + header = self.prepare_headers(force_anon) + url = (MASHAPE_URL if self.mashape_key is not None else API_URL) + ('3/%s' % route if 'oauth2' not in route else route) + + if method in ('delete', 'get'): + response = method_to_call(url, headers=header, params=data, data=data) + else: + response = method_to_call(url, headers=header, data=data) + + if response.status_code == 403 and self.auth is not None: + self.auth.refresh() + header = self.prepare_headers() + if method in ('delete', 'get'): + response = method_to_call(url, headers=header, params=data, data=data) + else: + response = method_to_call(url, headers=header, data=data) + + self.credits = { + 'UserLimit': response.headers.get('X-RateLimit-UserLimit'), + 'UserRemaining': response.headers.get('X-RateLimit-UserRemaining'), + 'UserReset': response.headers.get('X-RateLimit-UserReset'), + 'ClientLimit': response.headers.get('X-RateLimit-ClientLimit'), + 'ClientRemaining': response.headers.get('X-RateLimit-ClientRemaining') + } + + # Rate-limit check + if response.status_code == 429: + raise ImgurClientRateLimitError() + + try: + response_data = response.json() + except: + raise ImgurClientError('JSON decoding of response failed.') + + if 'data' in response_data and isinstance(response_data['data'], dict) and 'error' in response_data['data']: + raise ImgurClientError(response_data['data']['error'], response.status_code) + + return response_data['data'] if 'data' in response_data else response_data + + def validate_user_context(self, username): + if username == 'me' and self.auth is None: + raise ImgurClientError('\'me\' can only be used in the authenticated context.') + + def logged_in(self): + if self.auth is None: + raise ImgurClientError('Must be logged in to complete request.') + + # Account-related endpoints + def get_account(self, username): + self.validate_user_context(username) + account_data = self.make_request('GET', 'account/%s' % username) + + return Account( + account_data['id'], + account_data['url'], + account_data['bio'], + account_data['reputation'], + account_data['created'], + account_data['pro_expiration'], + ) + + def get_gallery_favorites(self, username, page=0): + self.validate_user_context(username) + gallery_favorites = self.make_request('GET', 'account/%s/gallery_favorites/%d' % (username, page)) + + return build_gallery_images_and_albums(gallery_favorites) + + def get_account_favorites(self, username, page=0): + self.validate_user_context(username) + favorites = self.make_request('GET', 'account/%s/favorites/%d' % (username, page)) + + return build_gallery_images_and_albums(favorites) + + def get_account_submissions(self, username, page=0): + self.validate_user_context(username) + submissions = self.make_request('GET', 'account/%s/submissions/%d' % (username, page)) + + return build_gallery_images_and_albums(submissions) + + def get_account_settings(self, username): + self.logged_in() + settings = self.make_request('GET', 'account/%s/settings' % username) + + return AccountSettings( + settings['email'], + settings['high_quality'], + settings['public_images'], + settings['album_privacy'], + settings['pro_expiration'], + settings['accepted_gallery_terms'], + settings['active_emails'], + settings['messaging_enabled'], + settings['blocked_users'] + ) + + def 
change_account_settings(self, username, fields): + post_data = {setting: fields[setting] for setting in set(self.allowed_account_fields).intersection(fields.keys())} + return self.make_request('POST', 'account/%s/settings' % username, post_data) + + def get_email_verification_status(self, username): + self.logged_in() + self.validate_user_context(username) + return self.make_request('GET', 'account/%s/verifyemail' % username) + + def send_verification_email(self, username): + self.logged_in() + self.validate_user_context(username) + return self.make_request('POST', 'account/%s/verifyemail' % username) + + def get_account_albums(self, username, page=0): + self.validate_user_context(username) + + albums = self.make_request('GET', 'account/%s/albums/%d' % (username, page)) + return [Album(album) for album in albums] + + def get_account_album_ids(self, username, page=0): + self.validate_user_context(username) + return self.make_request('GET', 'account/%s/albums/ids/%d' % (username, page)) + + def get_account_album_count(self, username): + self.validate_user_context(username) + return self.make_request('GET', 'account/%s/albums/count' % username) + + def get_account_comments(self, username, sort='newest', page=0): + self.validate_user_context(username) + comments = self.make_request('GET', 'account/%s/comments/%s/%s' % (username, sort, page)) + + return [Comment(comment) for comment in comments] + + def get_account_comment_ids(self, username, sort='newest', page=0): + self.validate_user_context(username) + return self.make_request('GET', 'account/%s/comments/ids/%s/%s' % (username, sort, page)) + + def get_account_comment_count(self, username): + self.validate_user_context(username) + return self.make_request('GET', 'account/%s/comments/count' % username) + + def get_account_images(self, username, page=0): + self.validate_user_context(username) + images = self.make_request('GET', 'account/%s/images/%d' % (username, page)) + + return [Image(image) for image in images] + + def get_account_image_ids(self, username, page=0): + self.validate_user_context(username) + return self.make_request('GET', 'account/%s/images/ids/%d' % (username, page)) + + def get_account_images_count(self, username): + self.validate_user_context(username) + return self.make_request('GET', 'account/%s/images/count' % username) + + # Album-related endpoints + def get_album(self, album_id): + album = self.make_request('GET', 'album/%s' % album_id) + return Album(album) + + def get_album_images(self, album_id): + images = self.make_request('GET', 'album/%s/images' % album_id) + return [Image(image) for image in images] + + def create_album(self, fields): + post_data = {field: fields[field] for field in set(self.allowed_album_fields).intersection(fields.keys())} + + if 'ids' in post_data: + self.logged_in() + + return self.make_request('POST', 'album', data=post_data) + + def update_album(self, album_id, fields): + post_data = {field: fields[field] for field in set(self.allowed_album_fields).intersection(fields.keys())} + + if isinstance(post_data['ids'], list): + post_data['ids'] = ','.join(post_data['ids']) + + return self.make_request('POST', 'album/%s' % album_id, data=post_data) + + def album_delete(self, album_id): + return self.make_request('DELETE', 'album/%s' % album_id) + + def album_favorite(self, album_id): + self.logged_in() + return self.make_request('POST', 'album/%s/favorite' % album_id) + + def album_set_images(self, album_id, ids): + if isinstance(ids, list): + ids = ','.join(ids) + + return 
self.make_request('POST', 'album/%s/' % album_id, {'ids': ids}) + + def album_add_images(self, album_id, ids): + if isinstance(ids, list): + ids = ','.join(ids) + + return self.make_request('POST', 'album/%s/add' % album_id, {'ids': ids}) + + def album_remove_images(self, album_id, ids): + if isinstance(ids, list): + ids = ','.join(ids) + + return self.make_request('DELETE', 'album/%s/remove_images' % album_id, {'ids': ids}) + + # Comment-related endpoints + def get_comment(self, comment_id): + comment = self.make_request('GET', 'comment/%d' % comment_id) + return Comment(comment) + + def delete_comment(self, comment_id): + self.logged_in() + return self.make_request('DELETE', 'comment/%d' % comment_id) + + def get_comment_replies(self, comment_id): + replies = self.make_request('GET', 'comment/%d/replies' % comment_id) + return format_comment_tree(replies) + + def post_comment_reply(self, comment_id, image_id, comment): + self.logged_in() + data = { + 'image_id': image_id, + 'comment': comment + } + + return self.make_request('POST', 'comment/%d' % comment_id, data) + + def comment_vote(self, comment_id, vote='up'): + self.logged_in() + return self.make_request('POST', 'comment/%d/vote/%s' % (comment_id, vote)) + + def comment_report(self, comment_id): + self.logged_in() + return self.make_request('POST', 'comment/%d/report' % comment_id) + + # Custom Gallery Endpoints + def get_custom_gallery(self, gallery_id, sort='viral', window='week', page=0): + gallery = self.make_request('GET', 'g/%s/%s/%s/%s' % (gallery_id, sort, window, page)) + return CustomGallery( + gallery['id'], + gallery['name'], + gallery['datetime'], + gallery['account_url'], + gallery['link'], + gallery['tags'], + gallery['item_count'], + gallery['items'] + ) + + def get_user_galleries(self): + self.logged_in() + galleries = self.make_request('GET', 'g') + + return [CustomGallery( + gallery['id'], + gallery['name'], + gallery['datetime'], + gallery['account_url'], + gallery['link'], + gallery['tags'] + ) for gallery in galleries] + + def create_custom_gallery(self, name, tags=None): + self.logged_in() + data = {'name': name} + + if tags: + data['tags'] = ','.join(tags) + + gallery = self.make_request('POST', 'g', data) + + return CustomGallery( + gallery['id'], + gallery['name'], + gallery['datetime'], + gallery['account_url'], + gallery['link'], + gallery['tags'] + ) + + def custom_gallery_update(self, gallery_id, name): + self.logged_in() + data = { + 'id': gallery_id, + 'name': name + } + + gallery = self.make_request('POST', 'g/%s' % gallery_id, data) + + return CustomGallery( + gallery['id'], + gallery['name'], + gallery['datetime'], + gallery['account_url'], + gallery['link'], + gallery['tags'] + ) + + def custom_gallery_add_tags(self, gallery_id, tags): + self.logged_in() + + if tags: + data = {'tags': ','.join(tags)} + else: + raise ImgurClientError('tags must not be empty!') + + return self.make_request('PUT', 'g/%s/add_tags' % gallery_id, data) + + def custom_gallery_remove_tags(self, gallery_id, tags): + self.logged_in() + + if tags: + data = {'tags': ','.join(tags)} + else: + raise ImgurClientError('tags must not be empty!') + + return self.make_request('DELETE', 'g/%s/remove_tags' % gallery_id, data) + + def custom_gallery_delete(self, gallery_id): + self.logged_in() + return self.make_request('DELETE', 'g/%s' % gallery_id) + + def filtered_out_tags(self): + self.logged_in() + return self.make_request('GET', 'g/filtered_out') + + def block_tag(self, tag): + self.logged_in() + return self.make_request('POST', 
'g/block_tag', data={'tag': tag}) + + def unblock_tag(self, tag): + self.logged_in() + return self.make_request('POST', 'g/unblock_tag', data={'tag': tag}) + + # Gallery-related endpoints + def gallery(self, section='hot', sort='viral', page=0, window='day', show_viral=True): + if section == 'top': + response = self.make_request('GET', 'gallery/%s/%s/%s/%d?showViral=%s' + % (section, sort, window, page, str(show_viral).lower())) + else: + response = self.make_request('GET', 'gallery/%s/%s/%d?showViral=%s' + % (section, sort, page, str(show_viral).lower())) + + return build_gallery_images_and_albums(response) + + def memes_subgallery(self, sort='viral', page=0, window='week'): + if sort == 'top': + response = self.make_request('GET', 'g/memes/%s/%s/%d' % (sort, window, page)) + else: + response = self.make_request('GET', 'g/memes/%s/%d' % (sort, page)) + + return build_gallery_images_and_albums(response) + + def memes_subgallery_image(self, item_id): + item = self.make_request('GET', 'g/memes/%s' % item_id) + return build_gallery_images_and_albums(item) + + def subreddit_gallery(self, subreddit, sort='time', window='week', page=0): + if sort == 'top': + response = self.make_request('GET', 'gallery/r/%s/%s/%s/%d' % (subreddit, sort, window, page)) + else: + response = self.make_request('GET', 'gallery/r/%s/%s/%d' % (subreddit, sort, page)) + + return build_gallery_images_and_albums(response) + + def subreddit_image(self, subreddit, image_id): + item = self.make_request('GET', 'gallery/r/%s/%s' % (subreddit, image_id)) + return build_gallery_images_and_albums(item) + + def gallery_tag(self, tag, sort='viral', page=0, window='week'): + if sort == 'top': + response = self.make_request('GET', 'gallery/t/%s/%s/%s/%d' % (tag, sort, window, page)) + else: + response = self.make_request('GET', 'gallery/t/%s/%s/%d' % (tag, sort, page)) + + return Tag( + response['name'], + response['followers'], + response['total_items'], + response['following'], + response['items'] + ) + + def gallery_tag_image(self, tag, item_id): + item = self.make_request('GET', 'gallery/t/%s/%s' % (tag, item_id)) + return build_gallery_images_and_albums(item) + + def gallery_item_tags(self, item_id): + response = self.make_request('GET', 'gallery/%s/tags' % item_id) + + return [TagVote( + item['ups'], + item['downs'], + item['name'], + item['author'] + ) for item in response['tags']] + + def gallery_tag_vote(self, item_id, tag, vote): + self.logged_in() + response = self.make_request('POST', 'gallery/%s/vote/tag/%s/%s' % (item_id, tag, vote)) + return response + + def gallery_search(self, q, advanced=None, sort='time', window='all', page=0): + if advanced: + data = {field: advanced[field] + for field in set(self.allowed_advanced_search_fields).intersection(advanced.keys())} + else: + data = {'q': q} + + response = self.make_request('GET', 'gallery/search/%s/%s/%s' % (sort, window, page), data) + return build_gallery_images_and_albums(response) + + def gallery_random(self, page=0): + response = self.make_request('GET', 'gallery/random/random/%d' % page) + return build_gallery_images_and_albums(response) + + def share_on_imgur(self, item_id, title, terms=0): + self.logged_in() + data = { + 'title': title, + 'terms': terms + } + + return self.make_request('POST', 'gallery/%s' % item_id, data) + + def remove_from_gallery(self, item_id): + self.logged_in() + return self.make_request('DELETE', 'gallery/%s' % item_id) + + def gallery_item(self, item_id): + response = self.make_request('GET', 'gallery/%s' % item_id) + return 
build_gallery_images_and_albums(response) + + def report_gallery_item(self, item_id): + self.logged_in() + return self.make_request('POST', 'gallery/%s/report' % item_id) + + def gallery_item_vote(self, item_id, vote='up'): + self.logged_in() + return self.make_request('POST', 'gallery/%s/vote/%s' % (item_id, vote)) + + def gallery_item_comments(self, item_id, sort='best'): + response = self.make_request('GET', 'gallery/%s/comments/%s' % (item_id, sort)) + return format_comment_tree(response) + + def gallery_comment(self, item_id, comment): + self.logged_in() + return self.make_request('POST', 'gallery/%s/comment' % item_id, {'comment': comment}) + + def gallery_comment_ids(self, item_id): + return self.make_request('GET', 'gallery/%s/comments/ids' % item_id) + + def gallery_comment_count(self, item_id): + return self.make_request('GET', 'gallery/%s/comments/count' % item_id) + + # Image-related endpoints + def get_image(self, image_id): + image = self.make_request('GET', 'image/%s' % image_id) + return Image(image) + + def upload_from_path(self, path, config=None, anon=True): + if not config: + config = dict() + + fd = open(path, 'rb') + contents = fd.read() + b64 = base64.b64encode(contents) + data = { + 'image': b64, + 'type': 'base64', + } + data.update({meta: config[meta] for meta in set(self.allowed_image_fields).intersection(config.keys())}) + fd.close() + + return self.make_request('POST', 'upload', data, anon) + + def upload_from_url(self, url, config=None, anon=True): + if not config: + config = dict() + + data = { + 'image': url, + 'type': 'url', + } + + data.update({meta: config[meta] for meta in set(self.allowed_image_fields).intersection(config.keys())}) + return self.make_request('POST', 'upload', data, anon) + + def delete_image(self, image_id): + return self.make_request('DELETE', 'image/%s' % image_id) + + def favorite_image(self, image_id): + self.logged_in() + return self.make_request('POST', 'image/%s/favorite' % image_id) + + # Conversation-related endpoints + def conversation_list(self): + self.logged_in() + + conversations = self.make_request('GET', 'conversations') + return [Conversation( + conversation['id'], + conversation['last_message_preview'], + conversation['datetime'], + conversation['with_account_id'], + conversation['with_account'], + conversation['message_count'], + ) for conversation in conversations] + + def get_conversation(self, conversation_id, page=1, offset=0): + self.logged_in() + + conversation = self.make_request('GET', 'conversations/%d/%d/%d' % (conversation_id, page, offset)) + return Conversation( + conversation['id'], + conversation['last_message_preview'], + conversation['datetime'], + conversation['with_account_id'], + conversation['with_account'], + conversation['message_count'], + conversation['messages'], + conversation['done'], + conversation['page'] + ) + + def create_message(self, recipient, body): + self.logged_in() + return self.make_request('POST', 'conversations/%s' % recipient, {'body': body}) + + def delete_conversation(self, conversation_id): + self.logged_in() + return self.make_request('DELETE', 'conversations/%d' % conversation_id) + + def report_sender(self, username): + self.logged_in() + return self.make_request('POST', 'conversations/report/%s' % username) + + def block_sender(self, username): + self.logged_in() + return self.make_request('POST', 'conversations/block/%s' % username) + + # Notification-related endpoints + def get_notifications(self, new=True): + self.logged_in() + response = self.make_request('GET', 
'notification', {'new': str(new).lower()}) + return build_notifications(response) + + def get_notification(self, notification_id): + self.logged_in() + response = self.make_request('GET', 'notification/%d' % notification_id) + return build_notification(response) + + def mark_notifications_as_read(self, notification_ids): + self.logged_in() + return self.make_request('POST', 'notification', ','.join(notification_ids)) + + # Memegen-related endpoints + def default_memes(self): + response = self.make_request('GET', 'memegen/defaults') + return [Image(meme) for meme in response] diff --git a/RBXLegacyDiscordBot/lib/imgurpython/helpers/__init__.py b/RBXLegacyDiscordBot/lib/imgurpython/helpers/__init__.py new file mode 100644 index 0000000..6142833 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/helpers/__init__.py @@ -0,0 +1,4 @@ +from ..imgur.models.comment import Comment +from ..imgur.models.notification import Notification +from ..imgur.models.gallery_album import GalleryAlbum +from ..imgur.models.gallery_image import GalleryImage \ No newline at end of file diff --git a/RBXLegacyDiscordBot/lib/imgurpython/helpers/error.py b/RBXLegacyDiscordBot/lib/imgurpython/helpers/error.py new file mode 100644 index 0000000..f385b28 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/helpers/error.py @@ -0,0 +1,15 @@ +class ImgurClientError(Exception): + def __init__(self, error_message, status_code=None): + self.status_code = status_code + self.error_message = error_message + + def __str__(self): + if self.status_code: + return "(%s) %s" % (self.status_code, self.error_message) + else: + return self.error_message + + +class ImgurClientRateLimitError(Exception): + def __str__(self): + return 'Rate-limit exceeded!' diff --git a/RBXLegacyDiscordBot/lib/imgurpython/helpers/format.py b/RBXLegacyDiscordBot/lib/imgurpython/helpers/format.py new file mode 100644 index 0000000..70cb39f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/helpers/format.py @@ -0,0 +1,83 @@ +from ..helpers import Comment +from ..helpers import GalleryAlbum +from ..helpers import GalleryImage +from ..helpers import Notification + + +def build_comment_tree(children): + children_objects = [] + for child in children: + to_insert = Comment(child) + to_insert.children = build_comment_tree(to_insert.children) + children_objects.append(to_insert) + + return children_objects + + +def format_comment_tree(response): + if isinstance(response, list): + result = [] + for comment in response: + formatted = Comment(comment) + formatted.children = build_comment_tree(comment['children']) + result.append(formatted) + else: + result = Comment(response) + result.children = build_comment_tree(response['children']) + + return result + + +def build_gallery_images_and_albums(response): + if isinstance(response, list): + result = [] + for item in response: + if item['is_album']: + result.append(GalleryAlbum(item)) + else: + result.append(GalleryImage(item)) + else: + if response['is_album']: + result = GalleryAlbum(response) + else: + result = GalleryImage(response) + + return result + + +def build_notifications(response): + result = { + 'replies': [], + 'messages': [Notification( + item['id'], + item['account_id'], + item['viewed'], + item['content'] + ) for item in response['messages']] + } + + for item in response['replies']: + notification = Notification( + item['id'], + item['account_id'], + item['viewed'], + item['content'] + ) + notification.content = format_comment_tree(item['content']) + result['replies'].append(notification) + + 
return result + + +def build_notification(item): + notification = Notification( + item['id'], + item['account_id'], + item['viewed'], + item['content'] + ) + + if 'comment' in notification.content: + notification.content = format_comment_tree(item['content']) + + return notification diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/__init__.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/__init__.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/account.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/account.py new file mode 100644 index 0000000..01a798a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/account.py @@ -0,0 +1,9 @@ +class Account(object): + + def __init__(self, account_id, url, bio, reputation, created, pro_expiration): + self.id = account_id + self.url = url + self.bio = bio + self.reputation = reputation + self.created = created + self.pro_expiration = pro_expiration diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/account_settings.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/account_settings.py new file mode 100644 index 0000000..045aaf2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/account_settings.py @@ -0,0 +1,13 @@ +class AccountSettings(object): + + def __init__(self, email, high_quality, public_images, album_privacy, pro_expiration, accepted_gallery_terms, + active_emails, messaging_enabled, blocked_users): + self.email = email + self.high_quality = high_quality + self.public_images = public_images + self.album_privacy = album_privacy + self.pro_expiration = pro_expiration + self.accepted_gallery_terms = accepted_gallery_terms + self.active_emails = active_emails + self.messaging_enabled = messaging_enabled + self.blocked_users = blocked_users diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/album.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/album.py new file mode 100644 index 0000000..414acc2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/album.py @@ -0,0 +1,9 @@ +class Album(object): + + # See documentation at https://api.imgur.com/ for available fields + def __init__(self, *initial_data, **kwargs): + for dictionary in initial_data: + for key in dictionary: + setattr(self, key, dictionary[key]) + for key in kwargs: + setattr(self, key, kwargs[key]) diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/comment.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/comment.py new file mode 100644 index 0000000..29e4a9f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/comment.py @@ -0,0 +1,9 @@ +class Comment(object): + + # See documentation at https://api.imgur.com/ for available fields + def __init__(self, *initial_data, **kwargs): + for dictionary in initial_data: + for key in dictionary: + setattr(self, key, dictionary[key]) + for key in kwargs: + setattr(self, key, kwargs[key]) diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/conversation.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/conversation.py new file mode 100644 index 0000000..335196c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/conversation.py @@ -0,0 +1,27 @@ +from .message import Message + +class Conversation(object): + + def __init__(self, conversation_id, last_message_preview, 
datetime, with_account_id, with_account, message_count, messages=None, + done=None, page=None): + self.id = conversation_id + self.last_message_preview = last_message_preview + self.datetime = datetime + self.with_account_id = with_account_id + self.with_account = with_account + self.message_count = message_count + self.page = page + self.done = done + + if messages: + self.messages = [Message( + message['id'], + message['from'], + message['account_id'], + message['sender_id'], + message['body'], + message['conversation_id'], + message['datetime'], + ) for message in messages] + else: + self.messages = None diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/custom_gallery.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/custom_gallery.py new file mode 100644 index 0000000..912ba6d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/custom_gallery.py @@ -0,0 +1,16 @@ +from .gallery_album import GalleryAlbum +from .gallery_image import GalleryImage + + +class CustomGallery(object): + + def __init__(self, custom_gallery_id, name, datetime, account_url, link, tags, item_count=None, items=None): + self.id = custom_gallery_id + self.name = name + self.datetime = datetime + self.account_url = account_url + self.link = link + self.tags = tags + self.item_count = item_count + self.items = [GalleryAlbum(item) if item['is_album'] else GalleryImage(item) for item in items] \ + if items else None diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/gallery_album.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/gallery_album.py new file mode 100644 index 0000000..1622c99 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/gallery_album.py @@ -0,0 +1,9 @@ +class GalleryAlbum(object): + + # See documentation at https://api.imgur.com/ for available fields + def __init__(self, *initial_data, **kwargs): + for dictionary in initial_data: + for key in dictionary: + setattr(self, key, dictionary[key]) + for key in kwargs: + setattr(self, key, kwargs[key]) diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/gallery_image.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/gallery_image.py new file mode 100644 index 0000000..88faf19 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/gallery_image.py @@ -0,0 +1,9 @@ +class GalleryImage(object): + + # See documentation at https://api.imgur.com/ for available fields + def __init__(self, *initial_data, **kwargs): + for dictionary in initial_data: + for key in dictionary: + setattr(self, key, dictionary[key]) + for key in kwargs: + setattr(self, key, kwargs[key]) diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/image.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/image.py new file mode 100644 index 0000000..18f257e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/image.py @@ -0,0 +1,9 @@ +class Image(object): + + # See documentation at https://api.imgur.com/ for available fields + def __init__(self, *initial_data, **kwargs): + for dictionary in initial_data: + for key in dictionary: + setattr(self, key, dictionary[key]) + for key in kwargs: + setattr(self, key, kwargs[key]) diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/message.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/message.py new file mode 100644 index 0000000..0f98f5e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/message.py @@ -0,0 +1,10 @@ +class Message(object): + + def __init__(self, message_id, from_user, account_id, sender_id, 
body, conversation_id, datetime): + self.id = message_id + self.from_user = from_user + self.account_id = account_id + self.sender_id = sender_id + self.body = body + self.conversation_id = conversation_id + self.datetime = datetime diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/notification.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/notification.py new file mode 100644 index 0000000..7966953 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/notification.py @@ -0,0 +1,7 @@ +class Notification(object): + + def __init__(self, notification_id, account_id, viewed, content): + self.id = notification_id + self.account_id = account_id + self.viewed = viewed + self.content = content diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/tag.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/tag.py new file mode 100644 index 0000000..d9f8547 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/tag.py @@ -0,0 +1,13 @@ +from .gallery_album import GalleryAlbum +from .gallery_image import GalleryImage + + +class Tag(object): + + def __init__(self, name, followers, total_items, following, items): + self.name = name + self.followers = followers + self.total_items = total_items + self.following = following + self.items = [GalleryAlbum(item) if item['is_album'] else GalleryImage(item) for item in items] \ + if items else None diff --git a/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/tag_vote.py b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/tag_vote.py new file mode 100644 index 0000000..eb1a995 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/imgurpython/imgur/models/tag_vote.py @@ -0,0 +1,7 @@ +class TagVote(object): + + def __init__(self, ups, downs, name, author): + self.ups = ups + self.downs = downs + self.name = name + self.author = author diff --git a/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/DESCRIPTION.rst b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..5946846 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/DESCRIPTION.rst @@ -0,0 +1,214 @@ +========= +multidict +========= + +.. image:: https://travis-ci.org/aio-libs/multidict.svg?branch=master + :target: https://travis-ci.org/aio-libs/multidict + :align: right +.. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/multidict +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter +Multidicts are useful for working with HTTP headers, URL +query args etc. + +The code was extracted from aiohttp_ library. + +Introduction +------------ + +*HTTP Headers* and *URL query string* require specific data structure: +*multidict*. It behaves mostly like a regular ``dict`` but it may have +several *values* for the same *key* and *preserves insertion ordering*. + +The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). + +``multidict`` has four multidict classes: +``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` +and ``CIMultiDictProxy``. + +Immutable proxies (``MultiDictProxy`` and +``CIMultiDictProxy``) provide a dynamic view for the +proxied multidict, the view reflects underlying collection changes. They +implement the ``collections.abc.Mapping`` interface. + +Regular mutable (``MultiDict`` and ``CIMultiDict``) classes +implement ``collections.abc.MutableMapping`` and allows to change +their own content. 
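A minimal doctest-style sketch (editorial illustration, not part of the vendored README) of the mutable/proxy split described above, using only calls declared in the ``__init__.pyi`` stub added later in this patch::

    >>> from multidict import MultiDict, MultiDictProxy
    >>> d = MultiDict()
    >>> d.add('key', 'one')        # the same key may be stored several times
    >>> d.add('key', 'two')
    >>> d.getall('key')            # every value stored under 'key', in insertion order
    ['one', 'two']
    >>> d['key']                   # plain lookup returns the first value
    'one'
    >>> proxy = MultiDictProxy(d)  # read-only, dynamic view of d
    >>> proxy.getall('key')
    ['one', 'two']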
+ + +*Case insensitive* (``CIMultiDict`` and +``CIMultiDictProxy``) ones assume the *keys* are case +insensitive, e.g.:: + + >>> dct = CIMultiDict(key='val') + >>> 'Key' in dct + True + >>> dct['Key'] + 'val' + +*Keys* should be ``str`` or ``istr`` instances. + +The library has optional Cython_ optimization for sake of speed. + + +License +------- + +Apache 2 + + +.. _aiohttp: https://github.com/KeepSafe/aiohttp +.. _Cython: http://cython.org/ + +3.1.3 (2017-07-14) +------------------ + +* Fix build + +3.1.2 (2017-07-14) +------------------ + +* Fix type annotations + + +3.1.1 (2017-07-09) +------------------ + +* Fix #105: Remove memory leak in `istr` implementation + +3.1.0 (2017-06-25) +------------------ + +* Fix #99: raise `RuntimeError` on dict iterations if the dict was changed + +* Update `__init__.pyi` signatures + +3.0.0 (2017-06-21) +------------------ + +* Refactor internal data structures: main dict operations are about + 100% faster now. + +* Preserve order on multidict updates #68 + + Updates are `md[key] = val` and `md.update(...)` calls. + + Now **the last** entry is replaced with new key/value pair, all + previous occurrences are removed. + + If key is not present in dictionary the pair is added to the end + +* Force keys to `str` instances #88 + +* Implement `.popall(key[, default])` #84 + +* `.pop()` removes only first occurence, `.popone()` added #92 + +* Implement dict's version #86 + +* Proxies are not pickable anymore #77 + +2.1.7 (2017-05-29) +------------------ + +* Fix import warning on Python 3.6 #79 + +2.1.6 (2017-05-27) +------------------ + +* Rebuild the library for fixning missing `__spec__` attribute #79 + +2.1.5 (2017-05-13) +------------------ + +* Build Python 3.6 binary wheels + +2.1.4 (2016-12-1) +------------------ + +* Remove LICENSE filename extension @ MANIFEST.in file #31 + +2.1.3 (2016-11-26) +------------------ + +* Add a fastpath for multidict extending by multidict + + +2.1.2 (2016-09-25) +------------------ + +* Fix `CIMultiDict.update()` for case of accepting `istr` + + +2.1.1 (2016-09-22) +------------------ + +* Fix `CIMultiDict` constructor for case of accepting `istr` #11 + + +2.1.0 (2016-09-18) +------------------ + +* Allow to create proxy from proxy + +* Add type hints (PEP-484) + + +2.0.1 (2016-08-02) +------------------ + +* Don't crash on `{} - MultiDict().keys()` and similar operations #6 + + +2.0.0 (2016-07-28) +------------------ + +* Switch from uppercase approach for case-insensitive string to + `str.title()` #5 + +* Deprecase `upstr` class in favor of `istr` alias. 
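As a hedged sketch of the 3.0.0 ``.popone()`` / ``.popall()`` entries above (illustrative values, not taken from the package's own tests)::

    >>> from multidict import MultiDict
    >>> d = MultiDict([('a', 1), ('a', 2), ('b', 3)])
    >>> d.popone('a')        # removes and returns only the first occurrence
    1
    >>> d.popall('a')        # removes and returns all remaining values for 'a'
    [2]
    >>> d.popall('a', [])    # the optional default avoids KeyError once 'a' is gone
    []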
+ +1.2.2 (2016-08-02) +------------------ + +* Don't crash on `{} - MultiDict().keys()` and similar operations #6 + +1.2.1 (2016-07-21) +------------------ + +* Don't expose `multidict.__version__` + + +1.2.0 (2016-07-16) +------------------ + +* Make `upstr(upstr('abc'))` much faster + + +1.1.0 (2016-07-06) +------------------ + +* Don't double-iterate during MultiDict initialization #3 + +* Fix CIMultiDict.pop: it is case insensitive now #1 + +* Provide manylinux wheels as well as Windows ones + +1.0.3 (2016-03-24) +------------------ + +* Add missing MANIFEST.in + +1.0.2 (2016-03-24) +------------------ + +* Fix setup build + + +1.0.0 (2016-02-19) +------------------ + +* Initial implementation + diff --git a/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/INSTALLER b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/METADATA b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/METADATA new file mode 100644 index 0000000..43913cd --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/METADATA @@ -0,0 +1,231 @@ +Metadata-Version: 2.0 +Name: multidict +Version: 3.1.3 +Summary: multidict implementation +Home-page: https://github.com/aio-libs/multidict/ +Author: Andrew Svetlov +Author-email: andrew.svetlov@gmail.com +License: Apache 2 +Platform: UNKNOWN +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Development Status :: 5 - Production/Stable + +========= +multidict +========= + +.. image:: https://travis-ci.org/aio-libs/multidict.svg?branch=master + :target: https://travis-ci.org/aio-libs/multidict + :align: right +.. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/multidict +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter +Multidicts are useful for working with HTTP headers, URL +query args etc. + +The code was extracted from aiohttp_ library. + +Introduction +------------ + +*HTTP Headers* and *URL query string* require specific data structure: +*multidict*. It behaves mostly like a regular ``dict`` but it may have +several *values* for the same *key* and *preserves insertion ordering*. + +The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries). + +``multidict`` has four multidict classes: +``MultiDict``, ``MultiDictProxy``, ``CIMultiDict`` +and ``CIMultiDictProxy``. + +Immutable proxies (``MultiDictProxy`` and +``CIMultiDictProxy``) provide a dynamic view for the +proxied multidict, the view reflects underlying collection changes. They +implement the ``collections.abc.Mapping`` interface. + +Regular mutable (``MultiDict`` and ``CIMultiDict``) classes +implement ``collections.abc.MutableMapping`` and allows to change +their own content. + + +*Case insensitive* (``CIMultiDict`` and +``CIMultiDictProxy``) ones assume the *keys* are case +insensitive, e.g.:: + + >>> dct = CIMultiDict(key='val') + >>> 'Key' in dct + True + >>> dct['Key'] + 'val' + +*Keys* should be ``str`` or ``istr`` instances. 
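A short, hedged illustration of the case-insensitive lookup summarised above, using made-up HTTP-style header names and values::

    >>> from multidict import CIMultiDict
    >>> headers = CIMultiDict()
    >>> headers.add('Set-Cookie', 'a=1')     # differently-cased keys...
    >>> headers.add('set-cookie', 'b=2')
    >>> headers.getall('SET-COOKIE')         # ...are treated as the same key
    ['a=1', 'b=2']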
+ +The library has optional Cython_ optimization for sake of speed. + + +License +------- + +Apache 2 + + +.. _aiohttp: https://github.com/KeepSafe/aiohttp +.. _Cython: http://cython.org/ + +3.1.3 (2017-07-14) +------------------ + +* Fix build + +3.1.2 (2017-07-14) +------------------ + +* Fix type annotations + + +3.1.1 (2017-07-09) +------------------ + +* Fix #105: Remove memory leak in `istr` implementation + +3.1.0 (2017-06-25) +------------------ + +* Fix #99: raise `RuntimeError` on dict iterations if the dict was changed + +* Update `__init__.pyi` signatures + +3.0.0 (2017-06-21) +------------------ + +* Refactor internal data structures: main dict operations are about + 100% faster now. + +* Preserve order on multidict updates #68 + + Updates are `md[key] = val` and `md.update(...)` calls. + + Now **the last** entry is replaced with new key/value pair, all + previous occurrences are removed. + + If key is not present in dictionary the pair is added to the end + +* Force keys to `str` instances #88 + +* Implement `.popall(key[, default])` #84 + +* `.pop()` removes only first occurence, `.popone()` added #92 + +* Implement dict's version #86 + +* Proxies are not pickable anymore #77 + +2.1.7 (2017-05-29) +------------------ + +* Fix import warning on Python 3.6 #79 + +2.1.6 (2017-05-27) +------------------ + +* Rebuild the library for fixning missing `__spec__` attribute #79 + +2.1.5 (2017-05-13) +------------------ + +* Build Python 3.6 binary wheels + +2.1.4 (2016-12-1) +------------------ + +* Remove LICENSE filename extension @ MANIFEST.in file #31 + +2.1.3 (2016-11-26) +------------------ + +* Add a fastpath for multidict extending by multidict + + +2.1.2 (2016-09-25) +------------------ + +* Fix `CIMultiDict.update()` for case of accepting `istr` + + +2.1.1 (2016-09-22) +------------------ + +* Fix `CIMultiDict` constructor for case of accepting `istr` #11 + + +2.1.0 (2016-09-18) +------------------ + +* Allow to create proxy from proxy + +* Add type hints (PEP-484) + + +2.0.1 (2016-08-02) +------------------ + +* Don't crash on `{} - MultiDict().keys()` and similar operations #6 + + +2.0.0 (2016-07-28) +------------------ + +* Switch from uppercase approach for case-insensitive string to + `str.title()` #5 + +* Deprecase `upstr` class in favor of `istr` alias. 
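Following the 2.0.0 note above, ``upstr`` survives only as an alias of ``istr``; a rough sketch, assuming the title-casing behaviour implemented by the ``_istr.c`` added later in this patch::

    >>> from multidict import istr, upstr
    >>> upstr is istr                 # deprecated name, same class
    True
    >>> str(istr('content-type'))     # keys are canonicalised via str.title()
    'Content-Type'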
+ +1.2.2 (2016-08-02) +------------------ + +* Don't crash on `{} - MultiDict().keys()` and similar operations #6 + +1.2.1 (2016-07-21) +------------------ + +* Don't expose `multidict.__version__` + + +1.2.0 (2016-07-16) +------------------ + +* Make `upstr(upstr('abc'))` much faster + + +1.1.0 (2016-07-06) +------------------ + +* Don't double-iterate during MultiDict initialization #3 + +* Fix CIMultiDict.pop: it is case insensitive now #1 + +* Provide manylinux wheels as well as Windows ones + +1.0.3 (2016-03-24) +------------------ + +* Add missing MANIFEST.in + +1.0.2 (2016-03-24) +------------------ + +* Fix setup build + + +1.0.0 (2016-02-19) +------------------ + +* Initial implementation + diff --git a/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/RECORD b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/RECORD new file mode 100644 index 0000000..29a9d70 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/RECORD @@ -0,0 +1,17 @@ +multidict/__init__.py,sha256=uX7p0mvAPsxkA8BnXLkgVTwIgVECgRfmAxz_pFBUcpI,1162 +multidict/__init__.pyi,sha256=t_u0CDq62TOWhgd0bCtl7Wbu6haVYpQ8AHkuegJqZc0,1881 +multidict/_istr.c,sha256=YXwlwqq8tk4Jpu2NpImjVCYzZKKrbH5aWEj9k4O1wW4,6775 +multidict/_istr.cp36-win32.pyd,sha256=-KuV_IkYhmPWRW7CJlwdDf8O-Umpq4ZA1unHorytkmU,10240 +multidict/_multidict.c,sha256=Vc7PngWe-6CBt0vOSoDPvwZR5oxOOloSa-sHM8vCPo0,747382 +multidict/_multidict.cp36-win32.pyd,sha256=fV6nRnyRZ3dnj1gMqVL69a34B7v95uYCIWw9mIPkZOA,117248 +multidict/_multidict.pyx,sha256=QA_WuC9eEuvloGbl0ObxvYmArGLh9cFlAuQqKTnJpe4,23338 +multidict/_multidict_py.py,sha256=dEHKGaRdjoZSpTeHXgVfVtVp7XQUSMfQcKuVQeputG8,12600 +multidict-3.1.3.dist-info/DESCRIPTION.rst,sha256=Q8DAZgxgNwakxiTyOBS9_zaw6iCx0ddKlsUQO1Xng_M,4479 +multidict-3.1.3.dist-info/METADATA,sha256=VsSG79Cn5QwyR0jL0FHbUg7b3Lxh9XEpCEbs6n_66Fg,5069 +multidict-3.1.3.dist-info/RECORD,, +multidict-3.1.3.dist-info/WHEEL,sha256=xiHTm3JxoVljPSD6nSGhq3B4VY9iUqMNXwYQ259n1PI,102 +multidict-3.1.3.dist-info/metadata.json,sha256=_7R4lgP5qZN_raHv6kJW2R66xr_0dTFsQnUP30kEDQ8,775 +multidict-3.1.3.dist-info/top_level.txt,sha256=-euDElkk5_qkmfIJ7WiqCab02ZlSFZWynejKg59qZQQ,10 +multidict-3.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +multidict/__pycache__/_multidict_py.cpython-36.pyc,, +multidict/__pycache__/__init__.cpython-36.pyc,, diff --git a/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/WHEEL b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/WHEEL new file mode 100644 index 0000000..7872c33 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp36-cp36m-win32 + diff --git a/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/top_level.txt b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/top_level.txt new file mode 100644 index 0000000..afcecdf --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict-3.1.3.dist-info/top_level.txt @@ -0,0 +1 @@ +multidict diff --git a/RBXLegacyDiscordBot/lib/multidict/__init__.py b/RBXLegacyDiscordBot/lib/multidict/__init__.py new file mode 100644 index 0000000..df99b31 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict/__init__.py @@ -0,0 +1,34 @@ +"""Multidict implementation. + +HTTP Headers and URL query string require specific data structure: +multidict. It behaves mostly like a dict but it can have +several values for the same key. 
+""" + +import os + +__all__ = ('MultiDictProxy', 'CIMultiDictProxy', + 'MultiDict', 'CIMultiDict', 'upstr', 'istr') + +__version__ = '3.1.3' + + +if bool(os.environ.get('MULTIDICT_NO_EXTENSIONS')): + from ._multidict_py import (MultiDictProxy, + CIMultiDictProxy, + MultiDict, + CIMultiDict, + upstr, istr) +else: + try: + from ._multidict import (MultiDictProxy, + CIMultiDictProxy, + MultiDict, + CIMultiDict, + upstr, istr) + except ImportError: # pragma: no cover + from ._multidict_py import (MultiDictProxy, + CIMultiDictProxy, + MultiDict, + CIMultiDict, + upstr, istr) diff --git a/RBXLegacyDiscordBot/lib/multidict/__init__.pyi b/RBXLegacyDiscordBot/lib/multidict/__init__.pyi new file mode 100644 index 0000000..e9f05d2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict/__init__.pyi @@ -0,0 +1,69 @@ +from typing import Any, Mapping, MutableMapping, List, Union, Iterable +from typing import KeysView, ValuesView, ItemsView, Iterator, Tuple +from typing import overload, TypeVar, Generic, Optional + + +class istr(str): ... + +upstr = istr + +_S = Union[str, istr] + +_T = TypeVar('_T') + + +class MultiDict(MutableMapping[_S, _T]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, map: Mapping[_S, _T]) -> None: ... + @overload + def __init__(self, iterable: Iterable[Tuple[_S, _T]]) -> None: ... + + def getall(self, key: _S, default: _T=...) -> List[_T]: ... + def getone(self, key: _S, default: _T=...) -> _T: ... + + def copy(self) -> MultiDict: ... + + def add(self, key: _S, value: _T) -> None: ... + + @overload + def extend(self, dct: MultiDict[_T]) -> None: ... + @overload + def extend(self, map: Mapping[_S, _T]) -> None: ... + @overload + def extend(self, iterable: Iterable[Tuple[_S, _T]]) -> None: ... + + @overload + def popone(self, key: _S) -> _T: ... + @overload + def popone(self, key: _S, default: _T=...) -> _T: ... + + @overload + def popall(self, key: _S) -> List[_T]: ... + @overload + def popall(self, key: _S, default: _T=...) -> List[_T]: ... + + +class CIMultiDict(MultiDict[_T]): + def copy(self) -> CIMultiDict[_T]: ... + + +class MultiDictProxy(Mapping[_S, _T]): + def __init__(self, arg: Union[MultiDict[_T], MultiDictProxy[_T]]) -> None: ... + + def getall(self, key: _S, default: _T=...) -> List[_T]: ... + def getone(self, key: _S, default: _T=...) -> _T: ... + + def copy(self) -> MultiDictProxy: ... + + + +class CIMultiDictProxy(MultiDictProxy[_T]): + def copy(self) -> CIMultiDictProxy[_T]: ... + + +def getversion(md: Union[MultiDict[_T], + CIMultiDict[_T], + MultiDictProxy[_T], + CIMultiDictProxy[_T]]) -> int: ... diff --git a/RBXLegacyDiscordBot/lib/multidict/_istr.c b/RBXLegacyDiscordBot/lib/multidict/_istr.c new file mode 100644 index 0000000..083cba3 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict/_istr.c @@ -0,0 +1,238 @@ +#include "Python.h" +#include "structmember.h" + +PyDoc_STRVAR(istr__doc__, "istr class implementation"); + +/* We link this module statically for convenience. If compiled as a shared + library instead, some compilers don't allow addresses of Python objects + defined in other libraries to be used in static initializers here. The + DEFERRED_ADDRESS macro is used to tag the slots where such addresses + appear; the module init function must fill in the tagged slots at runtime. + The argument is for documentation -- the macro ignores it. 
+*/ +#define DEFERRED_ADDRESS(ADDR) 0 + + +typedef struct { + PyUnicodeObject str; + PyObject * canonical; +} istrobject; + +typedef struct { + PyObject *title; + PyObject *emptystr; + PyObject *emptydict; +} ModData; + +static struct PyModuleDef _istrmodule; +static PyTypeObject istr_type; + +static ModData * +modstate(PyObject *mod) +{ + return (ModData*)PyModule_GetState(mod); +} + +static ModData * +global_state(void) +{ + return modstate(PyState_FindModule(&_istrmodule)); +} + + + +static PyObject * +istr_title(istrobject *self, PyObject *args) +{ + if (!PyArg_ParseTuple(args, ":title")) + return NULL; + Py_INCREF(self); + return (PyObject*)self; +} + +static PyObject * +istr_str(istrobject *self) +{ + Py_INCREF(self->canonical); + return self->canonical; +} + +static PyMethodDef istr_methods[] = { + {"title", (PyCFunction)istr_title, METH_VARARGS, + PyDoc_STR("title()")}, + {NULL, NULL}, +}; + + +void istr_dealloc(istrobject *self) +{ + Py_XDECREF(self->canonical); + PyUnicode_Type.tp_dealloc((PyObject*)self); +} + +static PyObject * +istr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + PyObject *x = NULL; + static char *kwlist[] = {"object", "encoding", "errors", 0}; + char *encoding = NULL; + char *errors = NULL; + PyObject *s = NULL; + PyObject *tmp = NULL; + PyObject * new_args = NULL; + PyObject * ret = NULL; + + ModData * state = global_state(); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|Oss:str", + kwlist, &x, &encoding, &errors)) + return NULL; + if (x == NULL) { + s = state->emptystr; + Py_INCREF(s); + } + else if (PyObject_IsInstance(x, (PyObject*)&istr_type)) { + Py_INCREF(x); + return x; + } + else { + if (encoding == NULL && errors == NULL) { + tmp = PyObject_Str(x); + } else { + tmp = PyUnicode_FromEncodedObject(x, encoding, errors); + } + if (!tmp) { + goto finish; + } + s = PyObject_CallMethodObjArgs(tmp, state->title, NULL); + } + if (!s) + goto finish; + + new_args = PyTuple_Pack(1, s); + if (!new_args) { + goto finish; + } + ret = PyUnicode_Type.tp_new(type, new_args, state->emptydict); + if (!ret) { + goto finish; + } + ((istrobject*)ret)->canonical = s; + s = NULL; /* the reference is stollen by .canonical */ +finish: + Py_XDECREF(tmp); + Py_XDECREF(s); + Py_XDECREF(new_args); + return ret; +} + +static PyTypeObject istr_type = { + PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) + "multidict._istr.istr", + sizeof(istrobject), + 0, + (destructor)istr_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_reserved */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + (reprfunc)istr_str, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_UNICODE_SUBCLASS, + /* tp_flags */ + 0, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + istr_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + DEFERRED_ADDRESS(&PyUnicode_Type), /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + (newfunc)istr_new, /* tp_new */ +}; + + +static int mod_clear(PyObject *m) +{ + Py_CLEAR(modstate(m)->title); + Py_CLEAR(modstate(m)->emptystr); + Py_CLEAR(modstate(m)->emptydict); + return 0; +} + + +static struct PyModuleDef _istrmodule = { + 
PyModuleDef_HEAD_INIT, + "multidict._istr", + istr__doc__, + sizeof(ModData), + NULL, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + mod_clear, /* m_clear */ + NULL /* m_free */ +}; + + +PyObject* PyInit__istr(void) +{ + PyObject * tmp; + PyObject *mod; + + mod = PyState_FindModule(&_istrmodule); + if (mod) { + Py_INCREF(mod); + return mod; + } + + istr_type.tp_base = &PyUnicode_Type; + if (PyType_Ready(&istr_type) < 0) { + return NULL; + } + + mod = PyModule_Create(&_istrmodule); + if (!mod) { + return NULL; + } + tmp = PyUnicode_FromString("title"); + if (!tmp) { + goto err; + } + modstate(mod)->title = tmp; + tmp = PyUnicode_New(0, 0); + if (!tmp) { + goto err; + } + modstate(mod)->emptystr = tmp; + tmp = PyUnicode_FromString("title"); + if(!tmp) { + goto err; + } + modstate(mod)->title = tmp; + + Py_INCREF(&istr_type); + if (PyModule_AddObject(mod, "istr", (PyObject *)&istr_type) < 0) + goto err; + + return mod; +err: + Py_DECREF(mod); + return NULL; +} diff --git a/RBXLegacyDiscordBot/lib/multidict/_istr.cp36-win32.pyd b/RBXLegacyDiscordBot/lib/multidict/_istr.cp36-win32.pyd new file mode 100644 index 0000000000000000000000000000000000000000..8b43f49a761678809298e64f7c620339b152d77a GIT binary patch literal 10240 zcmeHM4^*7hm45>>FiAir5h~F{Cw0=`pU@dT!ry^_F!@s2GWC?Jyo0C=vi#j`YEJKV;X6f`t9$& zZzchZp4~lr&Ys=&9qxPY-gobP@7;UfeQ!SMw;yDwj4?exRT&#bOQnj}UtF1Vj9qin zOV_aH7QDV+^QzbTC$@OJge^^Ry&+##zdWMY?Mk zTLQ|LH0si8(V75Oj+=Ntg|Rek_;SnoKp^9yq_8Sj3_>J#xz*6BLdF_FeF&|ahIcSF zP3`>B!`KmMnZmrw=8w(7O}2SH1pm=lIQ&_C*Kd8WRKnJ$D=F2pZ< z*nmv1Ri2a`84-m^LPXI?W~vrbWe{Rj6ISKl5fl>!lgLRoaF@c$sClE7TOt}Ri(b69arwg$U$04TN0A2eXzxW5; z{?-p7;tY#aWH=CmQhHaqGL_EQh=Fi&sNw``e@qgUkBy9Z{$(k&K8GWx{5i4dEiJhq z3=!dBjS#akljj&J<(@^DwPhd{zC{_wHlf7q8K%+aXmmXv)h42dDL9L@j5uaKRaI4~ z1)=|J)<7L0El?~^MJkzeW0R7vZAH3P4QF(vzz%Ye{D$&FtWAAKH7M201fwax1|EN) z^#e8h3*|3JBHoqZ87GT;2P21wZ6Nk7y%3w&ykUqiWdT;W-*N7WF;ciOenC5X31g9! 
z+(O9(-hc&mF>*sKsc#WZ^~bUvenoyo3DMNb(-2X^=adD>$yp&LS&T7xU&eqM&MK{- zX}?7tc?-JuysA!G7*SIsgWPJGu%T*brWszsxI7gNpBuQ8)&DpUjr`glRP{3X+n33s zn30nssxJKHflJ{V8=R!&DCiAM)H?t{t+xYG1FB?*sHx!%n%3?sTBS7_sSI55)PzN@ zpOkf5RI6vPi6H{;SyGvFp+#x@xXC$M0bSeHps2LGd8wu1s(oArU!|kzj$!_*qvKhiXE8O$g054| zF{5dvN$;Puzz1E|DJNm{ftY0IN@;ohmZ7OTm?@e$Np+4w_40F6uNkMh;$7682Xc%x z)MU1S&i9L(^yn9mXL&(Ffk0QPmMc6=*u9 zV~1#PXa;T{F-)PRb&nV>qDKd9#DEmZdtLf%_r%yppB^}!3=Nlg+KAQ}rh&gQ&g4vn zG(x!N9QLfd4=c&Pb0jrxKZG>&Mxrq;t>bT`2lNn8N*f?+rXX;a3c&}29VQl_Z#Qs;b z`TZmS5;RRTd(5W5PFD(>t0-hXzVQA53ifTl?bI5Y3#OFoLiJb$PJXxrB#tV7Pfaq;-nwd zP240qCS@j^y1xW!V_6R&5mG)#Fcid;R!mN(Qn?ij(2&))4<=q9#9pMh`nG1hz6vrp zlhXS3Ww0Su(}@;AfjFK&kJ z3DdwE;SBke%*E>&du80TXf(>t_b2}p<>y*+ArD1l#3hjmN}w*Bf8IMYuG`^O$E0!PMl8uGs(qWgi98iHzs~T_(}N{Zv2PPxWJcRR7~%s(-u( zb?1SHpXANpBfNP`;>|!0Zw~u-^Ft4sM;{z`6b(kOWZz?62 zM*Ugjq`v&);76{uyNUG}u~IdcHFG|zy7DNd9HZj7=Xz#xt82i*a&;z0k@4e zG&S>hL}UrCIisSYhNor6OyX40ejHQoA(FcgN1VA4Iw&nJtOYCIa)I2chLH~wSG5Nr zWb#lBQR1)>E^RIRSJq*-EHatbAe^mPu-pWOu7%2Xz(N_)Ppsk;6Jn7}8cGpPH8n?< zlg!}0GYwgPEq{D>G(CE+KDyZ$ZO)J#XXN^GSls=;@dm_=vSWNrKoWh6MrFslUAi^v z#Xxw`=(aA_@)^l+4P;kNtyQxsVre=i-CVIFErm(TDtf5Nsc@#HGU*x+(WHZz%EF7~ z3S=!)Ecx9Z z(65%LV+!V#7DV1m(bT!xe!wUp`L&(vM+~%Ua5B)DQQAt4N!lcw%F>;tG`to5xhU69 zN5<2HQ<3SkurP2c{MAEbw#cYa6UWJ6mmE{(A)-o4BBN%Fj?Be4O^ETp!wHWIu~I`e z*yx&!kEtt{A!yW%88IiaP9Fv%C-lezsVa*fdIYFv3L`^3c(ABBk0ytEQz75v`7O4UkbD9|c(0$pH5g85 zi#Kd-mMbI@?<2)Olh2Uavr?P9dJm{g>f2p-N=I|moFgQN(F}}Mf$rH4f#^0pWTr`- zD&0y{T7~Bzs-|d(Y9LI_c@mwJPf{om7W2YS@8(H3ahIP z&Yolav9ZXcsbOnVGdJc)t%d(g7gY4<9ma&S=<&}G!gs9gLR2SV%ONGfsG=o^xpxMp9|OmO7{G|7_1C~wLeI0XS62~_L1Bn9|6xExZ5 zrm-=(&Z3vA^I)3lJinz=L7oF2YO7%;3O8|!h9Fn;BHC0vI+&x(O9nm-YiadP;d z%KtM994p4vfw#j2wC@1E4>$o?O9Pl*Y0AM$u6VL=G0bCEz11{hN@^io&fMLK90F{zuxB~x+ zg?`@B&)%e1^4l&wQ0%xrC44KDuG2=po@DxOwDgwgv1vb>=|MF|1w8{%@VsK{YZ~il zl)km{@2(bqfDc-=Z?ySd`gm&LJBA+q88+W)?daUa*mFs|=B`G|wwh{^8=gzsPPAil zXd5|SY%Y!SotQ&&a=y{IG|o3Rht|mXPJ?E`T={%0jht^Bw1Y%Td?$uIwDoh`655ma zE=)Ej+iQH5`|i8Xy%XDGM};K6)M43J&F8^mH=%=Q+xa}hvuTYQA6~jw_!>1ne0XGi zv>raOWe>V6`K>)))D}bE+?~Xu*}Z^x&VfdkU4jNJh=0(IUZDZg;_@eWZ6x>*xhPeL zcO7Vnz2e&%>mi<o`kjt>9MZLz{Sz(IKA3XQLe=(7ubpd|cs zqD|u7#ajF&@weuUpZkLG7nd;8835@`%Ac7f|Mr*gW(UwXmE?H}!XnuAW$*y^0eS#W z;Fa)gw2ZmDAt{*O(IXVCzSkQN3i5k;-OSm$G2oS4e2jS#XPvk|f<So`5DO+;r*OmX&V8eAJg8v`M!Jc0^Gx)CGj1e7Dbs`^vC2;As!IeWKg3r$g+KyxoCyUY{glwRamDMCo&fm!x3& zns6{E1|(;&yFt&Z38Xu?4$cS%F(`&0#wzt&!eX%3DF!>c zgMKo1O?MbSOoQy&&uZW3X4lQ4HhSP$H;C+gqBit~B+=jK^^4UZY(S(QyIxo84YltC z#+mE9JA>`PUNUNkZA+!2=56B)MG@#gaGB+3j+RILY0;E+3RIwt@LYe@H}c4ReXyrY_CG!9rI@x8L6#U`;i5 zZQazkvEE@Tv}#&QkPomNY~9aFuk>9at7fi{=y7#&r?_YdOBwV8I=#DKNmrmf80@~+ z)gIino6ksEv`r<4#K3NEush%${cY>okAnrlBk;p`hRWesl+=WeuL04j=GaTrk zkifqfu@|u;j8L5;bQ9JRlw5E)6tU}o@eXV6@vioVD6_4`_-g#;TY^JY`)sRis}ozk zS~~h)qdmLyV1>>SKy5-d+0ESHYY&CY zUOy5oMSVMu;Q6!6#p%Ja9`H2)*_HmC|EKcDY5^U~#nu6Rb1k5w50doc`V1ZaHbmyv z-m2f7E5tGT+C6@sdAAr0A)8jLvgKP>nUlGrV%65hb*oEOnd!yhZucRG#wgQWub`tnrVUgv`~5q?lseW&NJ@`_>;@-?09x^_=w&))ZTo?d!H)Tg3KV z+mLP8cFATec(vfS1vNsQ&@8kG-xT_V2ZV#d_k>4nD^Sz%asSvV=YF1#(gC;U?Q zjqrOx6%2)0g-Z)>DJ(7ASa?_A3x)Ts`I|MrUz1s6F1o$QUQ}NcD%xN4V9~=xPZd2| zlvTW}xTg5dVt4W5#V;1WR{U1+d&M6WFD+S7QdDwB$)=LVlI +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#ifndef HAVE_LONG_LONG + #if 
PY_VERSION_HEX >= 0x03030000 || (PY_MAJOR_VERSION == 2 && PY_VERSION_HEX >= 0x02070000) + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define 
CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject **args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__multidict___multidict +#define __PYX_HAVE_API__multidict___multidict +#include +#include +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#ifdef PYREX_WITHOUT_ASSERTIONS +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char 
intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) && defined (_M_X64) + #define __Pyx_sst_abs(value) _abs64(value) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyObject_AsSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#if PY_MAJOR_VERSION < 3 +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) +{ + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#else +#define __Pyx_Py_UNICODE_strlen Py_UNICODE_strlen +#endif +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + 
return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ + +static PyObject *__pyx_m; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "multidict\\_multidict.pyx", + "type.pxd", +}; + +/*--- Type declarations ---*/ +struct __pyx_obj_9multidict_10_multidict__Pair; +struct __pyx_obj_9multidict_10_multidict__Impl; +struct __pyx_obj_9multidict_10_multidict__Base; +struct __pyx_obj_9multidict_10_multidict_MultiDictProxy; +struct __pyx_obj_9multidict_10_multidict_CIMultiDictProxy; +struct __pyx_obj_9multidict_10_multidict_MultiDict; +struct __pyx_obj_9multidict_10_multidict_CIMultiDict; +struct __pyx_obj_9multidict_10_multidict__ViewBase; +struct __pyx_obj_9multidict_10_multidict__ViewBaseSet; +struct __pyx_obj_9multidict_10_multidict__ItemsIter; +struct __pyx_obj_9multidict_10_multidict__ItemsView; +struct __pyx_obj_9multidict_10_multidict__ValuesIter; +struct __pyx_obj_9multidict_10_multidict__ValuesView; +struct __pyx_obj_9multidict_10_multidict__KeysIter; +struct __pyx_obj_9multidict_10_multidict__KeysView; + +/* "multidict/_multidict.pyx":54 + * + * + * cdef class _Pair: # <<<<<<<<<<<<<< + * cdef str _identity + * cdef Py_hash_t _hash + */ +struct __pyx_obj_9multidict_10_multidict__Pair { + PyObject_HEAD + PyObject *_identity; + Py_hash_t _hash; + PyObject *_key; + PyObject *_value; +}; + + +/* "multidict/_multidict.pyx":70 + * + * + * cdef class _Impl: # <<<<<<<<<<<<<< + * cdef list _items + * cdef unsigned long long _version + */ +struct __pyx_obj_9multidict_10_multidict__Impl { + PyObject_HEAD + struct __pyx_vtabstruct_9multidict_10_multidict__Impl *__pyx_vtab; + PyObject *_items; + unsigned PY_LONG_LONG _version; +}; + + +/* "multidict/_multidict.pyx":84 + * + * + * cdef class _Base: # <<<<<<<<<<<<<< + * + * cdef _Impl _impl + */ +struct __pyx_obj_9multidict_10_multidict__Base { + PyObject_HEAD + struct __pyx_vtabstruct_9multidict_10_multidict__Base *__pyx_vtab; + struct __pyx_obj_9multidict_10_multidict__Impl *_impl; +}; + + +/* "multidict/_multidict.pyx":217 + * + * + * cdef class MultiDictProxy(_Base): # <<<<<<<<<<<<<< + * _proxy_classes = (MultiDict, MultiDictProxy) + * _base_class = MultiDict + */ +struct __pyx_obj_9multidict_10_multidict_MultiDictProxy { + struct __pyx_obj_9multidict_10_multidict__Base __pyx_base; +}; + + +/* "multidict/_multidict.pyx":243 + * + * + * cdef class CIMultiDictProxy(MultiDictProxy): # <<<<<<<<<<<<<< + * _proxy_classes = (CIMultiDict, CIMultiDictProxy) + * _base_class = CIMultiDict + */ +struct __pyx_obj_9multidict_10_multidict_CIMultiDictProxy { + struct __pyx_obj_9multidict_10_multidict_MultiDictProxy __pyx_base; +}; + + +/* "multidict/_multidict.pyx":272 + * + * + * cdef class MultiDict(_Base): # <<<<<<<<<<<<<< + * """An ordered dictionary that can have multiple values for each key.""" + * + */ +struct __pyx_obj_9multidict_10_multidict_MultiDict { + struct __pyx_obj_9multidict_10_multidict__Base __pyx_base; +}; + + +/* 
"multidict/_multidict.pyx":525 + * + * + * cdef class CIMultiDict(MultiDict): # <<<<<<<<<<<<<< + * """An ordered dictionary that can have multiple values for each key.""" + * + */ +struct __pyx_obj_9multidict_10_multidict_CIMultiDict { + struct __pyx_obj_9multidict_10_multidict_MultiDict __pyx_base; +}; + + +/* "multidict/_multidict.pyx":545 + * + * + * cdef class _ViewBase: # <<<<<<<<<<<<<< + * + * cdef _Impl _impl + */ +struct __pyx_obj_9multidict_10_multidict__ViewBase { + PyObject_HEAD + struct __pyx_obj_9multidict_10_multidict__Impl *_impl; +}; + + +/* "multidict/_multidict.pyx":556 + * + * + * cdef class _ViewBaseSet(_ViewBase): # <<<<<<<<<<<<<< + * + * def __richcmp__(self, other, op): + */ +struct __pyx_obj_9multidict_10_multidict__ViewBaseSet { + struct __pyx_obj_9multidict_10_multidict__ViewBase __pyx_base; +}; + + +/* "multidict/_multidict.pyx":637 + * + * + * cdef class _ItemsIter: # <<<<<<<<<<<<<< + * cdef _Impl _impl + * cdef int _current + */ +struct __pyx_obj_9multidict_10_multidict__ItemsIter { + PyObject_HEAD + struct __pyx_obj_9multidict_10_multidict__Impl *_impl; + int _current; + int _len; + unsigned PY_LONG_LONG _version; +}; + + +/* "multidict/_multidict.pyx":662 + * + * + * cdef class _ItemsView(_ViewBaseSet): # <<<<<<<<<<<<<< + * + * def isdisjoint(self, other): + */ +struct __pyx_obj_9multidict_10_multidict__ItemsView { + struct __pyx_obj_9multidict_10_multidict__ViewBaseSet __pyx_base; +}; + + +/* "multidict/_multidict.pyx":703 + * + * + * cdef class _ValuesIter: # <<<<<<<<<<<<<< + * cdef _Impl _impl + * cdef int _current + */ +struct __pyx_obj_9multidict_10_multidict__ValuesIter { + PyObject_HEAD + struct __pyx_obj_9multidict_10_multidict__Impl *_impl; + int _current; + int _len; + unsigned PY_LONG_LONG _version; +}; + + +/* "multidict/_multidict.pyx":728 + * + * + * cdef class _ValuesView(_ViewBase): # <<<<<<<<<<<<<< + * + * def __contains__(self, value): + */ +struct __pyx_obj_9multidict_10_multidict__ValuesView { + struct __pyx_obj_9multidict_10_multidict__ViewBase __pyx_base; +}; + + +/* "multidict/_multidict.pyx":754 + * + * + * cdef class _KeysIter: # <<<<<<<<<<<<<< + * cdef _Impl _impl + * cdef int _current + */ +struct __pyx_obj_9multidict_10_multidict__KeysIter { + PyObject_HEAD + struct __pyx_obj_9multidict_10_multidict__Impl *_impl; + int _current; + int _len; + unsigned PY_LONG_LONG _version; +}; + + +/* "multidict/_multidict.pyx":779 + * + * + * cdef class _KeysView(_ViewBaseSet): # <<<<<<<<<<<<<< + * + * def isdisjoint(self, other): + */ +struct __pyx_obj_9multidict_10_multidict__KeysView { + struct __pyx_obj_9multidict_10_multidict__ViewBaseSet __pyx_base; +}; + + + +/* "multidict/_multidict.pyx":70 + * + * + * cdef class _Impl: # <<<<<<<<<<<<<< + * cdef list _items + * cdef unsigned long long _version + */ + +struct __pyx_vtabstruct_9multidict_10_multidict__Impl { + void (*incr_version)(struct __pyx_obj_9multidict_10_multidict__Impl *); +}; +static struct __pyx_vtabstruct_9multidict_10_multidict__Impl *__pyx_vtabptr_9multidict_10_multidict__Impl; + + +/* "multidict/_multidict.pyx":84 + * + * + * cdef class _Base: # <<<<<<<<<<<<<< + * + * cdef _Impl _impl + */ + +struct __pyx_vtabstruct_9multidict_10_multidict__Base { + PyObject *(*_title)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *); + PyObject *(*_getall)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *, PyObject *, PyObject *); + PyObject *(*_getone)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *, PyObject *, PyObject *); + PyObject 
*(*_contains)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *); + PyObject *(*keys)(struct __pyx_obj_9multidict_10_multidict__Base *, int __pyx_skip_dispatch); + PyObject *(*_eq_to_mapping)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *); +}; +static struct __pyx_vtabstruct_9multidict_10_multidict__Base *__pyx_vtabptr_9multidict_10_multidict__Base; + + +/* "multidict/_multidict.pyx":217 + * + * + * cdef class MultiDictProxy(_Base): # <<<<<<<<<<<<<< + * _proxy_classes = (MultiDict, MultiDictProxy) + * _base_class = MultiDict + */ + +struct __pyx_vtabstruct_9multidict_10_multidict_MultiDictProxy { + struct __pyx_vtabstruct_9multidict_10_multidict__Base __pyx_base; +}; +static struct __pyx_vtabstruct_9multidict_10_multidict_MultiDictProxy *__pyx_vtabptr_9multidict_10_multidict_MultiDictProxy; + + +/* "multidict/_multidict.pyx":243 + * + * + * cdef class CIMultiDictProxy(MultiDictProxy): # <<<<<<<<<<<<<< + * _proxy_classes = (CIMultiDict, CIMultiDictProxy) + * _base_class = CIMultiDict + */ + +struct __pyx_vtabstruct_9multidict_10_multidict_CIMultiDictProxy { + struct __pyx_vtabstruct_9multidict_10_multidict_MultiDictProxy __pyx_base; +}; +static struct __pyx_vtabstruct_9multidict_10_multidict_CIMultiDictProxy *__pyx_vtabptr_9multidict_10_multidict_CIMultiDictProxy; + + +/* "multidict/_multidict.pyx":272 + * + * + * cdef class MultiDict(_Base): # <<<<<<<<<<<<<< + * """An ordered dictionary that can have multiple values for each key.""" + * + */ + +struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict { + struct __pyx_vtabstruct_9multidict_10_multidict__Base __pyx_base; + PyObject *(*_extend)(struct __pyx_obj_9multidict_10_multidict_MultiDict *, PyObject *, PyObject *, PyObject *, int); + PyObject *(*_add)(struct __pyx_obj_9multidict_10_multidict_MultiDict *, PyObject *, PyObject *); + PyObject *(*_replace)(struct __pyx_obj_9multidict_10_multidict_MultiDict *, PyObject *, PyObject *); + PyObject *(*_remove)(struct __pyx_obj_9multidict_10_multidict_MultiDict *, PyObject *); +}; +static struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *__pyx_vtabptr_9multidict_10_multidict_MultiDict; + + +/* "multidict/_multidict.pyx":525 + * + * + * cdef class CIMultiDict(MultiDict): # <<<<<<<<<<<<<< + * """An ordered dictionary that can have multiple values for each key.""" + * + */ + +struct __pyx_vtabstruct_9multidict_10_multidict_CIMultiDict { + struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict __pyx_base; +}; +static struct __pyx_vtabstruct_9multidict_10_multidict_CIMultiDict *__pyx_vtabptr_9multidict_10_multidict_CIMultiDict; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + 
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* ArgTypeTest.proto */ +static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* IncludeStringH.proto */ +#include <string.h> + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* StrEquals.proto */ +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals +#else +#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals +#endif + +/* GetModuleGlobalName.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* KeywordStringCheck.proto */ +static CYTHON_INLINE int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* ListAppend.proto */ +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { + Py_INCREF(x); + PyList_SET_ITEM(list, len, x); + Py_SIZE(list) = len+1; + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = PyThreadState_GET(); +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define
__Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* PyObjectCallNoArg.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) +#endif + +/* StringJoin.proto */ +#if PY_MAJOR_VERSION < 3 +#define __Pyx_PyString_Join __Pyx_PyBytes_Join +#define __Pyx_PyBaseString_Join(s, v) (PyUnicode_CheckExact(s) ? 
PyUnicode_Join(s, v) : __Pyx_PyBytes_Join(s, v)) +#else +#define __Pyx_PyString_Join PyUnicode_Join +#define __Pyx_PyBaseString_Join PyUnicode_Join +#endif +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION < 3 + #define __Pyx_PyBytes_Join _PyString_Join + #else + #define __Pyx_PyBytes_Join _PyBytes_Join + #endif +#else +static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values); +#endif + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* PyIntBinop.proto */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, long intval, int inplace); +#else +#define __Pyx_PyInt_EqObjC(op1, op2, intval, inplace)\ + PyObject_RichCompare(op1, op2, Py_EQ) + #endif + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* ListExtend.proto */ +static CYTHON_INLINE int __Pyx_PyList_Extend(PyObject* L, PyObject* v) { +#if CYTHON_COMPILING_IN_CPYTHON + PyObject* none = _PyList_Extend((PyListObject*)L, v); + if (unlikely(!none)) + return -1; + Py_DECREF(none); + return 0; +#else + return PyList_SetSlice(L, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, v); +#endif +} + +/* py_dict_items.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyDict_Items(PyObject* d); + +/* UnpackUnboundCMethod.proto */ +typedef struct { + PyObject *type; + PyObject **method_name; + PyCFunction func; + PyObject *method; + int flag; +} __Pyx_CachedCFunction; + +/* CallUnboundCMethod0.proto */ +static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_CallUnboundCMethod0(cfunc, self)\ + ((likely((cfunc)->func)) ?\ + (likely((cfunc)->flag == METH_NOARGS) ? (*((cfunc)->func))(self, NULL) :\ + (likely((cfunc)->flag == (METH_VARARGS | METH_KEYWORDS)) ? ((*(PyCFunctionWithKeywords)(cfunc)->func)(self, __pyx_empty_tuple, NULL)) :\ + ((cfunc)->flag == METH_VARARGS ? (*((cfunc)->func))(self, __pyx_empty_tuple) :\ + (PY_VERSION_HEX >= 0x030600B1 && (cfunc)->flag == METH_FASTCALL ? (*(__Pyx_PyCFunctionFast)(cfunc)->func)(self, &PyTuple_GET_ITEM(__pyx_empty_tuple, 0), 0, NULL) :\ + __Pyx__CallUnboundCMethod0(cfunc, self))))) :\ + __Pyx__CallUnboundCMethod0(cfunc, self)) +#else +#define __Pyx_CallUnboundCMethod0(cfunc, self) __Pyx__CallUnboundCMethod0(cfunc, self) +#endif + +/* DelItemInt.proto */ +#define __Pyx_DelItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_DelItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound) :\ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list assignment index out of range"), -1) :\ + __Pyx_DelItem_Generic(o, to_py_func(i)))) +static CYTHON_INLINE int __Pyx_DelItem_Generic(PyObject *o, PyObject *j); +static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound); + +/* PyObjectCallMethod1.proto */ +static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); + +/* pop_index.proto */ +static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix); +static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix); +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix); +#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ + (likely(PyList_CheckExact(L) && __Pyx_fits_Py_ssize_t(ix, type, is_signed))) ?\ + __Pyx__PyList_PopIndex(L, py_ix, ix) : (\ + (unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ + __Pyx__PyObject_PopIndex(L, py_ix))) +#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ + __Pyx_fits_Py_ssize_t(ix, type, is_signed) ?\ + __Pyx__PyList_PopIndex(L, py_ix, ix) : (\ + (unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ + __Pyx__PyObject_PopIndex(L, py_ix))) +#else +#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func)\ + __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) +#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ + (unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ + __Pyx__PyObject_PopIndex(L, py_ix)) +#endif + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable); + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* GetNameInClass.proto */ +static PyObject *__Pyx_GetNameInClass(PyObject *nmspace, PyObject *name); + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* PyIdentifierFromString.proto */ +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +/* ModuleImport.proto */ +static PyObject *__Pyx_ImportModule(const char *name); + +/* TypeImport.proto */ +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +static void __pyx_f_9multidict_10_multidict_5_Impl_incr_version(struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__title(CYTHON_UNUSED struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_s); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__getall(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_identity, PyObject *__pyx_v_key, PyObject *__pyx_v_default); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__getone(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_identity, PyObject *__pyx_v_key, PyObject *__pyx_v_default); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__contains(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_identity); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_5_Base_keys(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, int __pyx_skip_dispatch); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__eq_to_mapping(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_other); /* proto*/ +static PyObject 
*__pyx_f_9multidict_10_multidict_16CIMultiDictProxy__title(CYTHON_UNUSED struct __pyx_obj_9multidict_10_multidict_CIMultiDictProxy *__pyx_v_self, PyObject *__pyx_v_s); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_9MultiDict__extend(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs, PyObject *__pyx_v_name, int __pyx_v_do_add); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_9MultiDict__add(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_9MultiDict__replace(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_9MultiDict__remove(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key); /* proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_11CIMultiDict__title(CYTHON_UNUSED struct __pyx_obj_9multidict_10_multidict_CIMultiDict *__pyx_v_self, PyObject *__pyx_v_s); /* proto*/ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'multidict._multidict' */ +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__Pair = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__Impl = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__Base = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict_MultiDictProxy = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict_MultiDict = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict_CIMultiDict = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__ViewBase = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__ViewBaseSet = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__ItemsIter = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__ItemsView = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__ValuesIter = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__ValuesView = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__KeysIter = 0; +static PyTypeObject *__pyx_ptype_9multidict_10_multidict__KeysView = 0; +static PyObject *__pyx_v_9multidict_10_multidict__marker = 0; +static PyObject *__pyx_v_9multidict_10_multidict__istr = 0; +static unsigned PY_LONG_LONG __pyx_v_9multidict_10_multidict__version; +static PyObject *__pyx_f_9multidict_10_multidict__eq(PyObject *, PyObject *); /*proto*/ +static PyObject *__pyx_f_9multidict_10_multidict__str(PyObject *); /*proto*/ +#define __Pyx_MODULE_NAME "multidict._multidict" +int __pyx_module_is_main_multidict___multidict = 0; + +/* Implementation of 'multidict._multidict' */ +static PyObject *__pyx_builtin_object; +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_NotImplemented; +static PyObject *__pyx_builtin_KeyError; +static PyObject *__pyx_builtin_TypeError; +static PyObject *__pyx_builtin_RuntimeError; +static PyObject *__pyx_builtin_StopIteration; +static const char __pyx_k_r[] = "'{}': {!r}"; +static const 
char __pyx_k__3[] = ", "; +static const char __pyx_k__4[] = "<{}({})>"; +static const char __pyx_k_md[] = "md"; +static const char __pyx_k_or[] = " or "; +static const char __pyx_k_Set[] = "Set"; +static const char __pyx_k__10[] = "{}({})"; +static const char __pyx_k_abc[] = "abc"; +static const char __pyx_k_arg[] = "arg"; +static const char __pyx_k_key[] = "key"; +static const char __pyx_k_pop[] = "pop"; +static const char __pyx_k_r_2[] = "{!r}"; +static const char __pyx_k_r_r[] = "{!r}: {!r}"; +static const char __pyx_k_sys[] = "sys"; +static const char __pyx_k_impl[] = "impl"; +static const char __pyx_k_istr[] = "_istr"; +static const char __pyx_k_join[] = "join"; +static const char __pyx_k_keys[] = "keys"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_class[] = "__class__"; +static const char __pyx_k_clear[] = "clear"; +static const char __pyx_k_items[] = "items"; +static const char __pyx_k_range[] = "range"; +static const char __pyx_k_title[] = "title"; +static const char __pyx_k_upstr[] = "upstr"; +static const char __pyx_k_value[] = "value"; +static const char __pyx_k_extend[] = "extend"; +static const char __pyx_k_format[] = "format"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_istr_2[] = "istr"; +static const char __pyx_k_object[] = "object"; +static const char __pyx_k_popone[] = "popone"; +static const char __pyx_k_update[] = "update"; +static const char __pyx_k_Mapping[] = "Mapping"; +static const char __pyx_k_default[] = "default"; +static const char __pyx_k_Iterable[] = "Iterable"; +static const char __pyx_k_KeyError[] = "KeyError"; +static const char __pyx_k_KeysView[] = "KeysView"; +static const char __pyx_k_identity[] = "identity"; +static const char __pyx_k_register[] = "register"; +static const char __pyx_k_ItemsView[] = "ItemsView"; +static const char __pyx_k_MultiDict[] = "MultiDict"; +static const char __pyx_k_TypeError[] = "TypeError"; +static const char __pyx_k_ValuesView[] = "ValuesView"; +static const char __pyx_k_base_class[] = "_base_class"; +static const char __pyx_k_getversion[] = "getversion"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_CIMultiDict[] = "CIMultiDict"; +static const char __pyx_k_collections[] = "collections"; +static const char __pyx_k_RuntimeError[] = "RuntimeError"; +static const char __pyx_k_StopIteration[] = "StopIteration"; +static const char __pyx_k_proxy_classes[] = "_proxy_classes"; +static const char __pyx_k_MutableMapping[] = "MutableMapping"; +static const char __pyx_k_NotImplemented[] = "NotImplemented"; +static const char __pyx_k_Key_not_found_r[] = "Key not found: %r"; +static const char __pyx_k_collections_abc[] = "collections.abc"; +static const char __pyx_k_empty_multidict[] = "empty multidict"; +static const char __pyx_k_can_t_pickle_objects[] = "can't pickle {} objects"; +static const char __pyx_k_multidict__multidict[] = "multidict._multidict"; +static const char __pyx_k_ctor_requires_instance_not[] = "ctor requires {} instance, not {}"; +static const char __pyx_k_C_projects_multidict_multidict[] = "C:\\projects\\multidict\\multidict\\_multidict.pyx"; +static const char __pyx_k_MultiDict_keys_should_be_either[] = "MultiDict keys should be either str or subclasses of str"; +static const char __pyx_k_takes_at_most_1_positional_argu[] = "{} takes at most 1 positional argument ({} given)"; +static const char 
__pyx_k_takes_either_dict_or_list_of_ke[] = "{} takes either dict or list of (key, value) tuples"; +static const char __pyx_k_Dictionary_changed_during_iterat[] = "Dictionary changed during iteration"; +static PyObject *__pyx_n_s_CIMultiDict; +static PyObject *__pyx_kp_s_C_projects_multidict_multidict; +static PyObject *__pyx_kp_s_Dictionary_changed_during_iterat; +static PyObject *__pyx_n_s_ItemsView; +static PyObject *__pyx_n_s_Iterable; +static PyObject *__pyx_n_s_KeyError; +static PyObject *__pyx_kp_s_Key_not_found_r; +static PyObject *__pyx_n_s_KeysView; +static PyObject *__pyx_n_s_Mapping; +static PyObject *__pyx_n_s_MultiDict; +static PyObject *__pyx_kp_s_MultiDict_keys_should_be_either; +static PyObject *__pyx_n_s_MutableMapping; +static PyObject *__pyx_n_s_NotImplemented; +static PyObject *__pyx_n_s_RuntimeError; +static PyObject *__pyx_n_s_Set; +static PyObject *__pyx_n_s_StopIteration; +static PyObject *__pyx_n_s_TypeError; +static PyObject *__pyx_n_s_ValuesView; +static PyObject *__pyx_kp_s__10; +static PyObject *__pyx_kp_s__3; +static PyObject *__pyx_kp_s__4; +static PyObject *__pyx_n_s_abc; +static PyObject *__pyx_n_s_arg; +static PyObject *__pyx_n_s_base_class; +static PyObject *__pyx_kp_s_can_t_pickle_objects; +static PyObject *__pyx_n_s_class; +static PyObject *__pyx_n_s_clear; +static PyObject *__pyx_n_s_collections; +static PyObject *__pyx_n_s_collections_abc; +static PyObject *__pyx_kp_s_ctor_requires_instance_not; +static PyObject *__pyx_n_s_default; +static PyObject *__pyx_kp_s_empty_multidict; +static PyObject *__pyx_n_s_extend; +static PyObject *__pyx_n_s_format; +static PyObject *__pyx_n_s_getversion; +static PyObject *__pyx_n_s_identity; +static PyObject *__pyx_n_s_impl; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_istr; +static PyObject *__pyx_n_s_istr_2; +static PyObject *__pyx_n_s_items; +static PyObject *__pyx_n_s_join; +static PyObject *__pyx_n_s_key; +static PyObject *__pyx_n_s_keys; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_md; +static PyObject *__pyx_n_s_multidict__multidict; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_n_s_object; +static PyObject *__pyx_kp_s_or; +static PyObject *__pyx_n_s_pop; +static PyObject *__pyx_n_s_popone; +static PyObject *__pyx_n_s_proxy_classes; +static PyObject *__pyx_n_s_pyx_vtable; +static PyObject *__pyx_kp_s_r; +static PyObject *__pyx_kp_s_r_2; +static PyObject *__pyx_kp_s_r_r; +static PyObject *__pyx_n_s_range; +static PyObject *__pyx_n_s_register; +static PyObject *__pyx_n_s_sys; +static PyObject *__pyx_kp_s_takes_at_most_1_positional_argu; +static PyObject *__pyx_kp_s_takes_either_dict_or_list_of_ke; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_title; +static PyObject *__pyx_n_s_update; +static PyObject *__pyx_n_s_upstr; +static PyObject *__pyx_n_s_value; +static PyObject *__pyx_pf_9multidict_10_multidict_getversion(CYTHON_UNUSED PyObject *__pyx_self, struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_md); /* proto */ +static int __pyx_pf_9multidict_10_multidict_5_Pair___cinit__(struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_self, PyObject *__pyx_v_identity, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_9multidict_10_multidict_5_Impl___cinit__(struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_getall(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default); /* proto */ 
+static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_2getone(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_4__getitem__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_6get(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default); /* proto */ +static int __pyx_pf_9multidict_10_multidict_5_Base_8__contains__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_10__iter__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self); /* proto */ +static Py_ssize_t __pyx_pf_9multidict_10_multidict_5_Base_12__len__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_14keys(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_16items(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_18values(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_20__repr__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_22__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op); /* proto */ +static int __pyx_pf_9multidict_10_multidict_14MultiDictProxy___init__(struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *__pyx_v_self, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_14MultiDictProxy_2__reduce__(struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_14MultiDictProxy_4copy(struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *__pyx_v_self); /* proto */ +static int __pyx_pf_9multidict_10_multidict_9MultiDict___init__(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_2__reduce__(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_4add(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_6copy(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_8extend(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_10clear(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self); /* proto */ +static int __pyx_pf_9multidict_10_multidict_9MultiDict_12__setitem__(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_9multidict_10_multidict_9MultiDict_14__delitem__(struct 
__pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_16setdefault(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_18popone(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_20popall(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_22popitem(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_24update(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs); /* proto */ +static int __pyx_pf_9multidict_10_multidict_11CIMultiDict___init__(struct __pyx_obj_9multidict_10_multidict_CIMultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs); /* proto */ +static int __pyx_pf_9multidict_10_multidict_9_ViewBase___cinit__(struct __pyx_obj_9multidict_10_multidict__ViewBase *__pyx_v_self, struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl); /* proto */ +static Py_ssize_t __pyx_pf_9multidict_10_multidict_9_ViewBase_2__len__(struct __pyx_obj_9multidict_10_multidict__ViewBase *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet___richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_2__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_4__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_6__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_8__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static int __pyx_pf_9multidict_10_multidict_10_ItemsIter___cinit__(struct __pyx_obj_9multidict_10_multidict__ItemsIter *__pyx_v_self, struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsIter_2__iter__(struct __pyx_obj_9multidict_10_multidict__ItemsIter *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsIter_4__next__(struct __pyx_obj_9multidict_10_multidict__ItemsIter *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsView_isdisjoint(struct __pyx_obj_9multidict_10_multidict__ItemsView *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static int __pyx_pf_9multidict_10_multidict_10_ItemsView_2__contains__(struct __pyx_obj_9multidict_10_multidict__ItemsView *__pyx_v_self, PyObject *__pyx_v_i); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsView_4__iter__(struct __pyx_obj_9multidict_10_multidict__ItemsView *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsView_6__repr__(struct __pyx_obj_9multidict_10_multidict__ItemsView *__pyx_v_self); /* proto */ +static int 
__pyx_pf_9multidict_10_multidict_11_ValuesIter___cinit__(struct __pyx_obj_9multidict_10_multidict__ValuesIter *__pyx_v_self, struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_11_ValuesIter_2__iter__(struct __pyx_obj_9multidict_10_multidict__ValuesIter *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_11_ValuesIter_4__next__(struct __pyx_obj_9multidict_10_multidict__ValuesIter *__pyx_v_self); /* proto */ +static int __pyx_pf_9multidict_10_multidict_11_ValuesView___contains__(struct __pyx_obj_9multidict_10_multidict__ValuesView *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_11_ValuesView_2__iter__(struct __pyx_obj_9multidict_10_multidict__ValuesView *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_11_ValuesView_4__repr__(struct __pyx_obj_9multidict_10_multidict__ValuesView *__pyx_v_self); /* proto */ +static int __pyx_pf_9multidict_10_multidict_9_KeysIter___cinit__(struct __pyx_obj_9multidict_10_multidict__KeysIter *__pyx_v_self, struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysIter_2__iter__(struct __pyx_obj_9multidict_10_multidict__KeysIter *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysIter_4__next__(struct __pyx_obj_9multidict_10_multidict__KeysIter *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysView_isdisjoint(struct __pyx_obj_9multidict_10_multidict__KeysView *__pyx_v_self, PyObject *__pyx_v_other); /* proto */ +static int __pyx_pf_9multidict_10_multidict_9_KeysView_2__contains__(struct __pyx_obj_9multidict_10_multidict__KeysView *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysView_4__iter__(struct __pyx_obj_9multidict_10_multidict__KeysView *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysView_6__repr__(struct __pyx_obj_9multidict_10_multidict__KeysView *__pyx_v_self); /* proto */ +static PyObject *__pyx_tp_new_9multidict_10_multidict__Pair(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__Impl(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__Base(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict_MultiDictProxy(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict_CIMultiDictProxy(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict_MultiDict(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict_CIMultiDict(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__ViewBase(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__ViewBaseSet(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__ItemsIter(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__ItemsView(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject 
*__pyx_tp_new_9multidict_10_multidict__ValuesIter(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__ValuesView(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__KeysIter(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9multidict_10_multidict__KeysView(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_items = {0, &__pyx_n_s_items, 0, 0, 0}; +static PyObject *__pyx_int_0; +static PyObject *__pyx_int_1; +static PyObject *__pyx_int_2; +static PyObject *__pyx_int_3; +static PyObject *__pyx_int_4; +static PyObject *__pyx_int_5; +static PyObject *__pyx_k_; +static PyObject *__pyx_k__2; +static PyObject *__pyx_k__6; +static PyObject *__pyx_k__7; +static PyObject *__pyx_tuple__5; +static PyObject *__pyx_tuple__8; +static PyObject *__pyx_tuple__9; +static PyObject *__pyx_tuple__11; +static PyObject *__pyx_tuple__12; +static PyObject *__pyx_tuple__13; +static PyObject *__pyx_codeobj__14; + +/* "multidict/_multidict.pyx":17 + * + * + * def getversion(_Base md): # <<<<<<<<<<<<<< + * return md._impl._version + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_1getversion(PyObject *__pyx_self, PyObject *__pyx_v_md); /*proto*/ +static PyMethodDef __pyx_mdef_9multidict_10_multidict_1getversion = {"getversion", (PyCFunction)__pyx_pw_9multidict_10_multidict_1getversion, METH_O, 0}; +static PyObject *__pyx_pw_9multidict_10_multidict_1getversion(PyObject *__pyx_self, PyObject *__pyx_v_md) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("getversion (wrapper)", 0); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_md), __pyx_ptype_9multidict_10_multidict__Base, 1, "md", 0))) __PYX_ERR(0, 17, __pyx_L1_error) + __pyx_r = __pyx_pf_9multidict_10_multidict_getversion(__pyx_self, ((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_md)); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_getversion(CYTHON_UNUSED PyObject *__pyx_self, struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_md) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("getversion", 0); + + /* "multidict/_multidict.pyx":18 + * + * def getversion(_Base md): + * return md._impl._version # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_unsigned_PY_LONG_LONG(__pyx_v_md->_impl->_version); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":17 + * + * + * def getversion(_Base md): # <<<<<<<<<<<<<< + * return md._impl._version + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict.getversion", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":21 + * + * + * cdef _eq(self, other): # <<<<<<<<<<<<<< + * cdef int is_left_base, is_right_base + * cdef Py_ssize_t i, l + */ + +static PyObject *__pyx_f_9multidict_10_multidict__eq(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + 
int __pyx_v_is_left_base; + int __pyx_v_is_right_base; + Py_ssize_t __pyx_v_i; + Py_ssize_t __pyx_v_l; + PyObject *__pyx_v_lft_items = 0; + PyObject *__pyx_v_rgt_items = 0; + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_lft = 0; + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_rgt = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + Py_ssize_t __pyx_t_4; + Py_ssize_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + __Pyx_RefNannySetupContext("_eq", 0); + + /* "multidict/_multidict.pyx":27 + * cdef _Pair lft, rgt + * + * is_left_base = isinstance(self, _Base) # <<<<<<<<<<<<<< + * is_right_base = isinstance(other, _Base) + * + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_self, __pyx_ptype_9multidict_10_multidict__Base); + __pyx_v_is_left_base = __pyx_t_1; + + /* "multidict/_multidict.pyx":28 + * + * is_left_base = isinstance(self, _Base) + * is_right_base = isinstance(other, _Base) # <<<<<<<<<<<<<< + * + * if is_left_base and is_right_base: + */ + __pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_9multidict_10_multidict__Base); + __pyx_v_is_right_base = __pyx_t_1; + + /* "multidict/_multidict.pyx":30 + * is_right_base = isinstance(other, _Base) + * + * if is_left_base and is_right_base: # <<<<<<<<<<<<<< + * lft_items = (<_Base>self)._impl._items + * rgt_items = (<_Base>other)._impl._items + */ + __pyx_t_2 = (__pyx_v_is_left_base != 0); + if (__pyx_t_2) { + } else { + __pyx_t_1 = __pyx_t_2; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = (__pyx_v_is_right_base != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":31 + * + * if is_left_base and is_right_base: + * lft_items = (<_Base>self)._impl._items # <<<<<<<<<<<<<< + * rgt_items = (<_Base>other)._impl._items + * l = len(lft_items) + */ + __pyx_t_3 = ((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self)->_impl->_items; + __Pyx_INCREF(__pyx_t_3); + __pyx_v_lft_items = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":32 + * if is_left_base and is_right_base: + * lft_items = (<_Base>self)._impl._items + * rgt_items = (<_Base>other)._impl._items # <<<<<<<<<<<<<< + * l = len(lft_items) + * if l != len(rgt_items): + */ + __pyx_t_3 = ((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_other)->_impl->_items; + __Pyx_INCREF(__pyx_t_3); + __pyx_v_rgt_items = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":33 + * lft_items = (<_Base>self)._impl._items + * rgt_items = (<_Base>other)._impl._items + * l = len(lft_items) # <<<<<<<<<<<<<< + * if l != len(rgt_items): + * return False + */ + if (unlikely(__pyx_v_lft_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 33, __pyx_L1_error) + } + __pyx_t_4 = PyList_GET_SIZE(__pyx_v_lft_items); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 33, __pyx_L1_error) + __pyx_v_l = __pyx_t_4; + + /* "multidict/_multidict.pyx":34 + * rgt_items = (<_Base>other)._impl._items + * l = len(lft_items) + * if l != len(rgt_items): # <<<<<<<<<<<<<< + * return False + * for i in range(l): + */ + if (unlikely(__pyx_v_rgt_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 34, __pyx_L1_error) + } + __pyx_t_4 = PyList_GET_SIZE(__pyx_v_rgt_items); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 34, __pyx_L1_error) + __pyx_t_1 = ((__pyx_v_l != __pyx_t_4) != 0); + if (__pyx_t_1) { + + /* 
"multidict/_multidict.pyx":35 + * l = len(lft_items) + * if l != len(rgt_items): + * return False # <<<<<<<<<<<<<< + * for i in range(l): + * lft = <_Pair>(lft_items[i]) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":34 + * rgt_items = (<_Base>other)._impl._items + * l = len(lft_items) + * if l != len(rgt_items): # <<<<<<<<<<<<<< + * return False + * for i in range(l): + */ + } + + /* "multidict/_multidict.pyx":36 + * if l != len(rgt_items): + * return False + * for i in range(l): # <<<<<<<<<<<<<< + * lft = <_Pair>(lft_items[i]) + * rgt = <_Pair>(rgt_items[i]) + */ + __pyx_t_4 = __pyx_v_l; + for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { + __pyx_v_i = __pyx_t_5; + + /* "multidict/_multidict.pyx":37 + * return False + * for i in range(l): + * lft = <_Pair>(lft_items[i]) # <<<<<<<<<<<<<< + * rgt = <_Pair>(rgt_items[i]) + * if lft._hash != rgt._hash: + */ + if (unlikely(__pyx_v_lft_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 37, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_GetItemInt_List(__pyx_v_lft_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 37, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __pyx_t_3; + __Pyx_INCREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_lft, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_6)); + __pyx_t_6 = 0; + + /* "multidict/_multidict.pyx":38 + * for i in range(l): + * lft = <_Pair>(lft_items[i]) + * rgt = <_Pair>(rgt_items[i]) # <<<<<<<<<<<<<< + * if lft._hash != rgt._hash: + * return False + */ + if (unlikely(__pyx_v_rgt_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 38, __pyx_L1_error) + } + __pyx_t_6 = __Pyx_GetItemInt_List(__pyx_v_rgt_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __pyx_t_6; + __Pyx_INCREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF_SET(__pyx_v_rgt, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":39 + * lft = <_Pair>(lft_items[i]) + * rgt = <_Pair>(rgt_items[i]) + * if lft._hash != rgt._hash: # <<<<<<<<<<<<<< + * return False + * if lft._identity != rgt._identity: + */ + __pyx_t_1 = ((__pyx_v_lft->_hash != __pyx_v_rgt->_hash) != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":40 + * rgt = <_Pair>(rgt_items[i]) + * if lft._hash != rgt._hash: + * return False # <<<<<<<<<<<<<< + * if lft._identity != rgt._identity: + * return False + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":39 + * lft = <_Pair>(lft_items[i]) + * rgt = <_Pair>(rgt_items[i]) + * if lft._hash != rgt._hash: # <<<<<<<<<<<<<< + * return False + * if lft._identity != rgt._identity: + */ + } + + /* "multidict/_multidict.pyx":41 + * if lft._hash != rgt._hash: + * return False + * if lft._identity != rgt._identity: # <<<<<<<<<<<<<< + * return False + * if lft._value != rgt._value: + */ + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_lft->_identity, __pyx_v_rgt->_identity, Py_NE)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 41, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":42 + * return False + * if 
lft._identity != rgt._identity: + * return False # <<<<<<<<<<<<<< + * if lft._value != rgt._value: + * return False + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":41 + * if lft._hash != rgt._hash: + * return False + * if lft._identity != rgt._identity: # <<<<<<<<<<<<<< + * return False + * if lft._value != rgt._value: + */ + } + + /* "multidict/_multidict.pyx":43 + * if lft._identity != rgt._identity: + * return False + * if lft._value != rgt._value: # <<<<<<<<<<<<<< + * return False + * return True + */ + __pyx_t_3 = PyObject_RichCompare(__pyx_v_lft->_value, __pyx_v_rgt->_value, Py_NE); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 43, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 43, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":44 + * return False + * if lft._value != rgt._value: + * return False # <<<<<<<<<<<<<< + * return True + * elif is_left_base and isinstance(other, abc.Mapping): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":43 + * if lft._identity != rgt._identity: + * return False + * if lft._value != rgt._value: # <<<<<<<<<<<<<< + * return False + * return True + */ + } + } + + /* "multidict/_multidict.pyx":45 + * if lft._value != rgt._value: + * return False + * return True # <<<<<<<<<<<<<< + * elif is_left_base and isinstance(other, abc.Mapping): + * return (<_Base>self)._eq_to_mapping(other) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":30 + * is_right_base = isinstance(other, _Base) + * + * if is_left_base and is_right_base: # <<<<<<<<<<<<<< + * lft_items = (<_Base>self)._impl._items + * rgt_items = (<_Base>other)._impl._items + */ + } + + /* "multidict/_multidict.pyx":46 + * return False + * return True + * elif is_left_base and isinstance(other, abc.Mapping): # <<<<<<<<<<<<<< + * return (<_Base>self)._eq_to_mapping(other) + * elif is_right_base and isinstance(self, abc.Mapping): + */ + __pyx_t_1 = (__pyx_v_is_left_base != 0); + if (__pyx_t_1) { + } else { + __pyx_t_2 = __pyx_t_1; + goto __pyx_L12_bool_binop_done; + } + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = PyObject_IsInstance(__pyx_v_other, __pyx_t_6); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 46, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_7 = (__pyx_t_1 != 0); + __pyx_t_2 = __pyx_t_7; + __pyx_L12_bool_binop_done:; + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":47 + * return True + * elif is_left_base and isinstance(other, abc.Mapping): + * return (<_Base>self)._eq_to_mapping(other) # <<<<<<<<<<<<<< + * elif is_right_base and isinstance(self, abc.Mapping): + * return (<_Base>other)._eq_to_mapping(self) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_6 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self)->__pyx_vtab)->_eq_to_mapping(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_other); if 
(unlikely(!__pyx_t_6)) __PYX_ERR(0, 47, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":46 + * return False + * return True + * elif is_left_base and isinstance(other, abc.Mapping): # <<<<<<<<<<<<<< + * return (<_Base>self)._eq_to_mapping(other) + * elif is_right_base and isinstance(self, abc.Mapping): + */ + } + + /* "multidict/_multidict.pyx":48 + * elif is_left_base and isinstance(other, abc.Mapping): + * return (<_Base>self)._eq_to_mapping(other) + * elif is_right_base and isinstance(self, abc.Mapping): # <<<<<<<<<<<<<< + * return (<_Base>other)._eq_to_mapping(self) + * else: + */ + __pyx_t_7 = (__pyx_v_is_right_base != 0); + if (__pyx_t_7) { + } else { + __pyx_t_2 = __pyx_t_7; + goto __pyx_L14_bool_binop_done; + } + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_7 = PyObject_IsInstance(__pyx_v_self, __pyx_t_3); if (unlikely(__pyx_t_7 == -1)) __PYX_ERR(0, 48, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = (__pyx_t_7 != 0); + __pyx_t_2 = __pyx_t_1; + __pyx_L14_bool_binop_done:; + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":49 + * return (<_Base>self)._eq_to_mapping(other) + * elif is_right_base and isinstance(self, abc.Mapping): + * return (<_Base>other)._eq_to_mapping(self) # <<<<<<<<<<<<<< + * else: + * return NotImplemented + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_other)->__pyx_vtab)->_eq_to_mapping(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_other), __pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":48 + * elif is_left_base and isinstance(other, abc.Mapping): + * return (<_Base>self)._eq_to_mapping(other) + * elif is_right_base and isinstance(self, abc.Mapping): # <<<<<<<<<<<<<< + * return (<_Base>other)._eq_to_mapping(self) + * else: + */ + } + + /* "multidict/_multidict.pyx":51 + * return (<_Base>other)._eq_to_mapping(self) + * else: + * return NotImplemented # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + } + + /* "multidict/_multidict.pyx":21 + * + * + * cdef _eq(self, other): # <<<<<<<<<<<<<< + * cdef int is_left_base, is_right_base + * cdef Py_ssize_t i, l + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("multidict._multidict._eq", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_lft_items); + __Pyx_XDECREF(__pyx_v_rgt_items); + __Pyx_XDECREF((PyObject *)__pyx_v_lft); + __Pyx_XDECREF((PyObject *)__pyx_v_rgt); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":60 + * cdef object _value + * + * def __cinit__(self, identity, key, value): # <<<<<<<<<<<<<< + * self._hash = hash(identity) + * self._identity = identity + */ + +/* Python wrapper */ +static int 
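
For readability, the _eq routine generated above can be read back from the embedded .pyx comments: when both operands are _Base instances it walks the two item lists in parallel and short-circuits on the cached hash, then the identity string, then the value; otherwise it falls back to _eq_to_mapping() or returns NotImplemented. A rough Python rendering of that logic (the zip form stands in for the generated index loop) is:

    def _eq(self, other):
        is_left_base = isinstance(self, _Base)
        is_right_base = isinstance(other, _Base)
        if is_left_base and is_right_base:
            lft_items = self._impl._items
            rgt_items = other._impl._items
            if len(lft_items) != len(rgt_items):
                return False
            for lft, rgt in zip(lft_items, rgt_items):
                if lft._hash != rgt._hash:        # cheap reject on cached hash
                    return False
                if lft._identity != rgt._identity:
                    return False
                if lft._value != rgt._value:
                    return False
            return True
        elif is_left_base and isinstance(other, abc.Mapping):
            return self._eq_to_mapping(other)
        elif is_right_base and isinstance(self, abc.Mapping):
            return other._eq_to_mapping(self)
        else:
            return NotImplemented
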
__pyx_pw_9multidict_10_multidict_5_Pair_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_5_Pair_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_identity = 0; + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_value = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_identity,&__pyx_n_s_key,&__pyx_n_s_value,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_identity)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 3, 3, 1); __PYX_ERR(0, 60, __pyx_L3_error) + } + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_value)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 3, 3, 2); __PYX_ERR(0, 60, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(0, 60, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_identity = values[0]; + __pyx_v_key = values[1]; + __pyx_v_value = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 60, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._Pair.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Pair___cinit__(((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_v_self), __pyx_v_identity, __pyx_v_key, __pyx_v_value); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_5_Pair___cinit__(struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_self, PyObject *__pyx_v_identity, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + Py_hash_t __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "multidict/_multidict.pyx":61 + * + * def __cinit__(self, identity, key, value): + * self._hash = hash(identity) # <<<<<<<<<<<<<< + * self._identity = identity + * self._key = key + */ + __pyx_t_1 = PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 61, __pyx_L1_error) + __pyx_v_self->_hash = __pyx_t_1; + + /* "multidict/_multidict.pyx":62 + * def __cinit__(self, identity, key, value): + * self._hash = hash(identity) + * self._identity = identity # 
<<<<<<<<<<<<<< + * self._key = key + * self._value = value + */ + __pyx_t_2 = __pyx_v_identity; + __Pyx_INCREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_identity); + __Pyx_DECREF(__pyx_v_self->_identity); + __pyx_v_self->_identity = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":63 + * self._hash = hash(identity) + * self._identity = identity + * self._key = key # <<<<<<<<<<<<<< + * self._value = value + * + */ + __pyx_t_2 = __pyx_v_key; + __Pyx_INCREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_v_self->_key); + __Pyx_DECREF(__pyx_v_self->_key); + __pyx_v_self->_key = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":64 + * self._identity = identity + * self._key = key + * self._value = value # <<<<<<<<<<<<<< + * + * + */ + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->_value); + __Pyx_DECREF(__pyx_v_self->_value); + __pyx_v_self->_value = __pyx_v_value; + + /* "multidict/_multidict.pyx":60 + * cdef object _value + * + * def __cinit__(self, identity, key, value): # <<<<<<<<<<<<<< + * self._hash = hash(identity) + * self._identity = identity + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._Pair.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":74 + * cdef unsigned long long _version + * + * def __cinit__(self): # <<<<<<<<<<<<<< + * self._items = [] + * self.incr_version() + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_5_Impl_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_5_Impl_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Impl___cinit__(((struct __pyx_obj_9multidict_10_multidict__Impl *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_5_Impl___cinit__(struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "multidict/_multidict.pyx":75 + * + * def __cinit__(self): + * self._items = [] # <<<<<<<<<<<<<< + * self.incr_version() + * + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 75, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_items); + __Pyx_DECREF(__pyx_v_self->_items); + __pyx_v_self->_items = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":76 + * def __cinit__(self): + * self._items = [] + * self.incr_version() # <<<<<<<<<<<<<< + * + * cdef void incr_version(self): + */ + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_vtab)->incr_version(__pyx_v_self); + + /* 
"multidict/_multidict.pyx":74 + * cdef unsigned long long _version + * + * def __cinit__(self): # <<<<<<<<<<<<<< + * self._items = [] + * self.incr_version() + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict._Impl.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":78 + * self.incr_version() + * + * cdef void incr_version(self): # <<<<<<<<<<<<<< + * global _version + * _version += 1 + */ + +static void __pyx_f_9multidict_10_multidict_5_Impl_incr_version(struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_self) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("incr_version", 0); + + /* "multidict/_multidict.pyx":80 + * cdef void incr_version(self): + * global _version + * _version += 1 # <<<<<<<<<<<<<< + * self._version = _version + * + */ + __pyx_v_9multidict_10_multidict__version = (__pyx_v_9multidict_10_multidict__version + 1); + + /* "multidict/_multidict.pyx":81 + * global _version + * _version += 1 + * self._version = _version # <<<<<<<<<<<<<< + * + * + */ + __pyx_v_self->_version = __pyx_v_9multidict_10_multidict__version; + + /* "multidict/_multidict.pyx":78 + * self.incr_version() + * + * cdef void incr_version(self): # <<<<<<<<<<<<<< + * global _version + * _version += 1 + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "multidict/_multidict.pyx":88 + * cdef _Impl _impl + * + * cdef str _title(self, s): # <<<<<<<<<<<<<< + * typ = type(s) + * if typ is str: + */ + +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__title(CYTHON_UNUSED struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_s) { + PyTypeObject *__pyx_v_typ = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("_title", 0); + + /* "multidict/_multidict.pyx":89 + * + * cdef str _title(self, s): + * typ = type(s) # <<<<<<<<<<<<<< + * if typ is str: + * return s + */ + __Pyx_INCREF(((PyObject *)Py_TYPE(__pyx_v_s))); + __pyx_v_typ = ((PyTypeObject*)((PyObject *)Py_TYPE(__pyx_v_s))); + + /* "multidict/_multidict.pyx":90 + * cdef str _title(self, s): + * typ = type(s) + * if typ is str: # <<<<<<<<<<<<<< + * return s + * elif typ is _istr: + */ + __pyx_t_1 = (__pyx_v_typ == (&PyString_Type)); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":91 + * typ = type(s) + * if typ is str: + * return s # <<<<<<<<<<<<<< + * elif typ is _istr: + * return PyObject_Str(s) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject*)__pyx_v_s)); + __pyx_r = ((PyObject*)__pyx_v_s); + goto __pyx_L0; + + /* "multidict/_multidict.pyx":90 + * cdef str _title(self, s): + * typ = type(s) + * if typ is str: # <<<<<<<<<<<<<< + * return s + * elif typ is _istr: + */ + } + + /* "multidict/_multidict.pyx":92 + * if typ is str: + * return s + * elif typ is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(s) + * else: + */ + __pyx_t_2 = (__pyx_v_typ == ((PyTypeObject*)__pyx_v_9multidict_10_multidict__istr)); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":93 + * return s + * elif typ is _istr: + * return PyObject_Str(s) # <<<<<<<<<<<<<< + * else: + * return str(s) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyObject_Str(__pyx_v_s); if (unlikely(!__pyx_t_3)) 
__PYX_ERR(0, 93, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 93, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":92 + * if typ is str: + * return s + * elif typ is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(s) + * else: + */ + } + + /* "multidict/_multidict.pyx":95 + * return PyObject_Str(s) + * else: + * return str(s) # <<<<<<<<<<<<<< + * + * def getall(self, key, default=_marker): + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_s); + __Pyx_GIVEREF(__pyx_v_s); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_s); + __pyx_t_4 = __Pyx_PyObject_Call(((PyObject *)(&PyString_Type)), __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 95, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 95, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "multidict/_multidict.pyx":88 + * cdef _Impl _impl + * + * cdef str _title(self, s): # <<<<<<<<<<<<<< + * typ = type(s) + * if typ is str: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._Base._title", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_typ); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":97 + * return str(s) + * + * def getall(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Return a list of all values matching the key.""" + * return self._getall(self._title(key), key, default) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_1getall(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_5_Base_getall[] = "Return a list of all values matching the key."; +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_1getall(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_default = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("getall (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + PyObject* values[2] = {0,0}; + values[1] = __pyx_k_; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (kw_args > 0) { + PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_default); + if (value) { values[1] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + 
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "getall") < 0)) __PYX_ERR(0, 97, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("getall", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 97, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._Base.getall", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_getall(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_getall(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("getall", 0); + + /* "multidict/_multidict.pyx":99 + * def getall(self, key, default=_marker): + * """Return a list of all values matching the key.""" + * return self._getall(self._title(key), key, default) # <<<<<<<<<<<<<< + * + * cdef _getall(self, str identity, key, default): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_title(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_getall(__pyx_v_self, ((PyObject*)__pyx_t_1), __pyx_v_key, __pyx_v_default); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":97 + * return str(s) + * + * def getall(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Return a list of all values matching the key.""" + * return self._getall(self._title(key), key, default) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._Base.getall", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":101 + * return self._getall(self._title(key), key, default) + * + * cdef _getall(self, str identity, key, default): # <<<<<<<<<<<<<< + * cdef list res + * cdef _Pair item + */ + +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__getall(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_identity, PyObject *__pyx_v_key, PyObject *__pyx_v_default) { + PyObject *__pyx_v_res = 0; + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + Py_hash_t __pyx_v_h; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + Py_hash_t __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + PyObject 
*__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + __Pyx_RefNannySetupContext("_getall", 0); + + /* "multidict/_multidict.pyx":104 + * cdef list res + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) # <<<<<<<<<<<<<< + * res = [] + * for i in self._impl._items: + */ + __pyx_t_1 = PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 104, __pyx_L1_error) + __pyx_v_h = __pyx_t_1; + + /* "multidict/_multidict.pyx":105 + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + * res = [] # <<<<<<<<<<<<<< + * for i in self._impl._items: + * item = <_Pair>i + */ + __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 105, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v_res = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":106 + * cdef Py_hash_t h = hash(identity) + * res = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._hash != h: + */ + if (unlikely(__pyx_v_self->_impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 106, __pyx_L1_error) + } + __pyx_t_2 = __pyx_v_self->_impl->_items; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0; + for (;;) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_2)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_4); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 106, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":107 + * res = [] + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * if item._hash != h: + * continue + */ + __pyx_t_4 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":108 + * for i in self._impl._items: + * item = <_Pair>i + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + __pyx_t_5 = ((__pyx_v_item->_hash != __pyx_v_h) != 0); + if (__pyx_t_5) { + + /* "multidict/_multidict.pyx":109 + * item = <_Pair>i + * if item._hash != h: + * continue # <<<<<<<<<<<<<< + * if item._identity == identity: + * res.append(item._value) + */ + goto __pyx_L3_continue; + + /* "multidict/_multidict.pyx":108 + * for i in self._impl._items: + * item = <_Pair>i + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + } + + /* "multidict/_multidict.pyx":110 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * res.append(item._value) + * if res: + */ + __pyx_t_5 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 110, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":111 + * continue + * if item._identity == identity: + * res.append(item._value) # <<<<<<<<<<<<<< + * if res: + * return res + */ + __pyx_t_4 = __pyx_v_item->_value; + __Pyx_INCREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyList_Append(__pyx_v_res, __pyx_t_4); if (unlikely(__pyx_t_7 == -1)) __PYX_ERR(0, 111, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":110 + * if item._hash != h: + * 
continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * res.append(item._value) + * if res: + */ + } + + /* "multidict/_multidict.pyx":106 + * cdef Py_hash_t h = hash(identity) + * res = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._hash != h: + */ + __pyx_L3_continue:; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":112 + * if item._identity == identity: + * res.append(item._value) + * if res: # <<<<<<<<<<<<<< + * return res + * elif default is not _marker: + */ + __pyx_t_6 = (__pyx_v_res != Py_None) && (PyList_GET_SIZE(__pyx_v_res) != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":113 + * res.append(item._value) + * if res: + * return res # <<<<<<<<<<<<<< + * elif default is not _marker: + * return default + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_res); + __pyx_r = __pyx_v_res; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":112 + * if item._identity == identity: + * res.append(item._value) + * if res: # <<<<<<<<<<<<<< + * return res + * elif default is not _marker: + */ + } + + /* "multidict/_multidict.pyx":114 + * if res: + * return res + * elif default is not _marker: # <<<<<<<<<<<<<< + * return default + * else: + */ + __pyx_t_6 = (__pyx_v_default != __pyx_v_9multidict_10_multidict__marker); + __pyx_t_5 = (__pyx_t_6 != 0); + if (__pyx_t_5) { + + /* "multidict/_multidict.pyx":115 + * return res + * elif default is not _marker: + * return default # <<<<<<<<<<<<<< + * else: + * raise KeyError('Key not found: %r' % key) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_default); + __pyx_r = __pyx_v_default; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":114 + * if res: + * return res + * elif default is not _marker: # <<<<<<<<<<<<<< + * return default + * else: + */ + } + + /* "multidict/_multidict.pyx":117 + * return default + * else: + * raise KeyError('Key not found: %r' % key) # <<<<<<<<<<<<<< + * + * def getone(self, key, default=_marker): + */ + /*else*/ { + __pyx_t_2 = __Pyx_PyString_Format(__pyx_kp_s_Key_not_found_r, __pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 117, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 117, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2); + __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 117, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __PYX_ERR(0, 117, __pyx_L1_error) + } + + /* "multidict/_multidict.pyx":101 + * return self._getall(self._title(key), key, default) + * + * cdef _getall(self, str identity, key, default): # <<<<<<<<<<<<<< + * cdef list res + * cdef _Pair item + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._Base._getall", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_res); + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":119 + * raise KeyError('Key not found: %r' % key) + * + * def getone(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Get first value matching the key.""" + * return 
self._getone(self._title(key), key, default) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_3getone(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_5_Base_2getone[] = "Get first value matching the key."; +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_3getone(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_default = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("getone (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + PyObject* values[2] = {0,0}; + values[1] = __pyx_k__2; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (kw_args > 0) { + PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_default); + if (value) { values[1] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "getone") < 0)) __PYX_ERR(0, 119, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("getone", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 119, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._Base.getone", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_2getone(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_2getone(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("getone", 0); + + /* "multidict/_multidict.pyx":121 + * def getone(self, key, default=_marker): + * """Get first value matching the key.""" + * return self._getone(self._title(key), key, default) # <<<<<<<<<<<<<< + * + * cdef _getone(self, str identity, key, default): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_title(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_getone(__pyx_v_self, ((PyObject*)__pyx_t_1), __pyx_v_key, __pyx_v_default); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":119 + * raise KeyError('Key not found: %r' % key) + * + * def getone(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Get first value matching the key.""" + * return self._getone(self._title(key), key, default) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._Base.getone", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":123 + * return self._getone(self._title(key), key, default) + * + * cdef _getone(self, str identity, key, default): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + */ + +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__getone(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_identity, PyObject *__pyx_v_key, PyObject *__pyx_v_default) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + Py_hash_t __pyx_v_h; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + Py_hash_t __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + __Pyx_RefNannySetupContext("_getone", 0); + + /* "multidict/_multidict.pyx":125 + * cdef _getone(self, str identity, key, default): + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) # <<<<<<<<<<<<<< + * for i in self._impl._items: + * item = <_Pair>i + */ + __pyx_t_1 = PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 125, __pyx_L1_error) + __pyx_v_h = __pyx_t_1; + + /* "multidict/_multidict.pyx":126 + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._hash != h: + */ + if (unlikely(__pyx_v_self->_impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 126, __pyx_L1_error) + } + __pyx_t_2 = __pyx_v_self->_impl->_items; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0; + for (;;) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_2)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_4); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 126, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":127 + * cdef Py_hash_t h = hash(identity) + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * if item._hash != h: + * continue + */ + __pyx_t_4 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":128 + * for i in self._impl._items: + * item = <_Pair>i + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + __pyx_t_5 = ((__pyx_v_item->_hash != __pyx_v_h) != 0); + if (__pyx_t_5) { + + /* "multidict/_multidict.pyx":129 + * item = <_Pair>i + * if 
item._hash != h: + * continue # <<<<<<<<<<<<<< + * if item._identity == identity: + * return item._value + */ + goto __pyx_L3_continue; + + /* "multidict/_multidict.pyx":128 + * for i in self._impl._items: + * item = <_Pair>i + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + } + + /* "multidict/_multidict.pyx":130 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * return item._value + * if default is not _marker: + */ + __pyx_t_5 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 130, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":131 + * continue + * if item._identity == identity: + * return item._value # <<<<<<<<<<<<<< + * if default is not _marker: + * return default + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_item->_value); + __pyx_r = __pyx_v_item->_value; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":130 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * return item._value + * if default is not _marker: + */ + } + + /* "multidict/_multidict.pyx":126 + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._hash != h: + */ + __pyx_L3_continue:; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":132 + * if item._identity == identity: + * return item._value + * if default is not _marker: # <<<<<<<<<<<<<< + * return default + * raise KeyError('Key not found: %r' % key) + */ + __pyx_t_6 = (__pyx_v_default != __pyx_v_9multidict_10_multidict__marker); + __pyx_t_5 = (__pyx_t_6 != 0); + if (__pyx_t_5) { + + /* "multidict/_multidict.pyx":133 + * return item._value + * if default is not _marker: + * return default # <<<<<<<<<<<<<< + * raise KeyError('Key not found: %r' % key) + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_default); + __pyx_r = __pyx_v_default; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":132 + * if item._identity == identity: + * return item._value + * if default is not _marker: # <<<<<<<<<<<<<< + * return default + * raise KeyError('Key not found: %r' % key) + */ + } + + /* "multidict/_multidict.pyx":134 + * if default is not _marker: + * return default + * raise KeyError('Key not found: %r' % key) # <<<<<<<<<<<<<< + * + * # Mapping interface # + */ + __pyx_t_2 = __Pyx_PyString_Format(__pyx_kp_s_Key_not_found_r, __pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2); + __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __PYX_ERR(0, 134, __pyx_L1_error) + + /* "multidict/_multidict.pyx":123 + * return self._getone(self._title(key), key, default) + * + * cdef _getone(self, str identity, key, default): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + 
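
Both lookup helpers in this stretch, _getall and _getone, follow the same pattern: hash the normalized key once, then linearly scan self._impl._items, skipping any _Pair whose cached _hash (set once in _Pair.__cinit__) does not match before doing the string comparison on _identity. _marker is the module-level sentinel meaning "no default was supplied". Reconstructed from the embedded .pyx comments, roughly:

    def _getall(self, identity, key, default):
        h = hash(identity)
        res = []
        for item in self._impl._items:
            if item._hash != h:              # cheap reject before string compare
                continue
            if item._identity == identity:
                res.append(item._value)
        if res:
            return res
        elif default is not _marker:
            return default
        else:
            raise KeyError('Key not found: %r' % key)

    def _getone(self, identity, key, default):
        h = hash(identity)
        for item in self._impl._items:
            if item._hash != h:
                continue
            if item._identity == identity:
                return item._value           # first match wins
        if default is not _marker:
            return default
        raise KeyError('Key not found: %r' % key)

Caching the hash in each _Pair keeps the common miss case down to a single integer comparison per stored item.
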
__Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._Base._getone", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":138 + * # Mapping interface # + * + * def __getitem__(self, key): # <<<<<<<<<<<<<< + * return self._getone(self._title(key), key, _marker) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_4__getitem__(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), ((PyObject *)__pyx_v_key)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_4__getitem__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__getitem__", 0); + + /* "multidict/_multidict.pyx":139 + * + * def __getitem__(self, key): + * return self._getone(self._title(key), key, _marker) # <<<<<<<<<<<<<< + * + * def get(self, key, default=None): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_title(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __pyx_v_9multidict_10_multidict__marker; + __Pyx_INCREF(__pyx_t_2); + __pyx_t_3 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_getone(__pyx_v_self, ((PyObject*)__pyx_t_1), __pyx_v_key, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":138 + * # Mapping interface # + * + * def __getitem__(self, key): # <<<<<<<<<<<<<< + * return self._getone(self._title(key), key, _marker) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict._Base.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":141 + * return self._getone(self._title(key), key, _marker) + * + * def get(self, key, default=None): # <<<<<<<<<<<<<< + * """Get first value matching the key. 
+ * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_7get(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_5_Base_6get[] = "Get first value matching the key.\n\n The method is alias for .getone().\n "; +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_7get(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_default = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + PyObject* values[2] = {0,0}; + values[1] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (kw_args > 0) { + PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_default); + if (value) { values[1] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "get") < 0)) __PYX_ERR(0, 141, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("get", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 141, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._Base.get", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_6get(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_6get(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("get", 0); + + /* "multidict/_multidict.pyx":146 + * The method is alias for .getone(). 
+ * """ + * return self._getone(self._title(key), key, default) # <<<<<<<<<<<<<< + * + * def __contains__(self, key): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_title(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_getone(__pyx_v_self, ((PyObject*)__pyx_t_1), __pyx_v_key, __pyx_v_default); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":141 + * return self._getone(self._title(key), key, _marker) + * + * def get(self, key, default=None): # <<<<<<<<<<<<<< + * """Get first value matching the key. + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._Base.get", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":148 + * return self._getone(self._title(key), key, default) + * + * def __contains__(self, key): # <<<<<<<<<<<<<< + * return self._contains(self._title(key)) + * + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_5_Base_9__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_5_Base_9__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_8__contains__(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), ((PyObject *)__pyx_v_key)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_5_Base_8__contains__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_key) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + __Pyx_RefNannySetupContext("__contains__", 0); + + /* "multidict/_multidict.pyx":149 + * + * def __contains__(self, key): + * return self._contains(self._title(key)) # <<<<<<<<<<<<<< + * + * cdef _contains(self, str identity): + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_title(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_contains(__pyx_v_self, ((PyObject*)__pyx_t_1)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_3; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":148 + * return self._getone(self._title(key), key, default) + * + * def __contains__(self, key): # <<<<<<<<<<<<<< + * return self._contains(self._title(key)) + * + */ + + /* function exit code */ + __pyx_L1_error:; + 
__Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._Base.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":151 + * return self._contains(self._title(key)) + * + * cdef _contains(self, str identity): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + */ + +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__contains(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_identity) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + Py_hash_t __pyx_v_h; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + Py_hash_t __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + __Pyx_RefNannySetupContext("_contains", 0); + + /* "multidict/_multidict.pyx":153 + * cdef _contains(self, str identity): + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) # <<<<<<<<<<<<<< + * for i in self._impl._items: + * item = <_Pair>i + */ + __pyx_t_1 = PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 153, __pyx_L1_error) + __pyx_v_h = __pyx_t_1; + + /* "multidict/_multidict.pyx":154 + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._hash != h: + */ + if (unlikely(__pyx_v_self->_impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 154, __pyx_L1_error) + } + __pyx_t_2 = __pyx_v_self->_impl->_items; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0; + for (;;) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_2)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_4); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 154, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_2, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 154, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":155 + * cdef Py_hash_t h = hash(identity) + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * if item._hash != h: + * continue + */ + __pyx_t_4 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":156 + * for i in self._impl._items: + * item = <_Pair>i + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + __pyx_t_5 = ((__pyx_v_item->_hash != __pyx_v_h) != 0); + if (__pyx_t_5) { + + /* "multidict/_multidict.pyx":157 + * item = <_Pair>i + * if item._hash != h: + * continue # <<<<<<<<<<<<<< + * if item._identity == identity: + * return True + */ + goto __pyx_L3_continue; + + /* "multidict/_multidict.pyx":156 + * for i in self._impl._items: + * item = <_Pair>i + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + } + + /* "multidict/_multidict.pyx":158 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * return True + * return False + */ + __pyx_t_5 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_5 < 
0)) __PYX_ERR(0, 158, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":159 + * continue + * if item._identity == identity: + * return True # <<<<<<<<<<<<<< + * return False + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":158 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * return True + * return False + */ + } + + /* "multidict/_multidict.pyx":154 + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._hash != h: + */ + __pyx_L3_continue:; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":160 + * if item._identity == identity: + * return True + * return False # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":151 + * return self._contains(self._title(key)) + * + * cdef _contains(self, str identity): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef Py_hash_t h = hash(identity) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._Base._contains", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":162 + * return False + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return iter(self.keys()) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_11__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_11__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_10__iter__(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_10__iter__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("__iter__", 0); + + /* "multidict/_multidict.pyx":163 + * + * def __iter__(self): + * return iter(self.keys()) # <<<<<<<<<<<<<< + * + * def __len__(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->keys(__pyx_v_self, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 163, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 163, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":162 + * return False + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return iter(self.keys()) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._Base.__iter__", __pyx_clineno, 
__pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":165 + * return iter(self.keys()) + * + * def __len__(self): # <<<<<<<<<<<<<< + * return len(self._impl._items) + * + */ + +/* Python wrapper */ +static Py_ssize_t __pyx_pw_9multidict_10_multidict_5_Base_13__len__(PyObject *__pyx_v_self); /*proto*/ +static Py_ssize_t __pyx_pw_9multidict_10_multidict_5_Base_13__len__(PyObject *__pyx_v_self) { + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_12__len__(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static Py_ssize_t __pyx_pf_9multidict_10_multidict_5_Base_12__len__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self) { + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + __Pyx_RefNannySetupContext("__len__", 0); + + /* "multidict/_multidict.pyx":166 + * + * def __len__(self): + * return len(self._impl._items) # <<<<<<<<<<<<<< + * + * cpdef keys(self): + */ + __pyx_t_1 = __pyx_v_self->_impl->_items; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 166, __pyx_L1_error) + } + __pyx_t_2 = PyList_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 166, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":165 + * return iter(self.keys()) + * + * def __len__(self): # <<<<<<<<<<<<<< + * return len(self._impl._items) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict._Base.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":168 + * return len(self._impl._items) + * + * cpdef keys(self): # <<<<<<<<<<<<<< + * """Return a new view of the dictionary's keys.""" + * return _KeysView.__new__(_KeysView, self._impl) + */ + +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_15keys(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_f_9multidict_10_multidict_5_Base_keys(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, int __pyx_skip_dispatch) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("keys", 0); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely(Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0)) { + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_keys); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)__pyx_pw_9multidict_10_multidict_5_Base_15keys)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if 
(likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (__pyx_t_4) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else { + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 168, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + + /* "multidict/_multidict.pyx":170 + * cpdef keys(self): + * """Return a new view of the dictionary's keys.""" + * return _KeysView.__new__(_KeysView, self._impl) # <<<<<<<<<<<<<< + * + * def items(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 170, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)__pyx_v_self->_impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self->_impl)); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_self->_impl)); + __pyx_t_2 = __pyx_tp_new_9multidict_10_multidict__KeysView(((PyTypeObject *)__pyx_ptype_9multidict_10_multidict__KeysView), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 170, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":168 + * return len(self._impl._items) + * + * cpdef keys(self): # <<<<<<<<<<<<<< + * """Return a new view of the dictionary's keys.""" + * return _KeysView.__new__(_KeysView, self._impl) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._Base.keys", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_15keys(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_5_Base_14keys[] = "Return a new view of the dictionary's keys."; +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_15keys(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("keys (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_14keys(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_14keys(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("keys", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_9multidict_10_multidict_5_Base_keys(__pyx_v_self, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict._Base.keys", __pyx_clineno, __pyx_lineno, __pyx_filename); + 
__pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":172 + * return _KeysView.__new__(_KeysView, self._impl) + * + * def items(self): # <<<<<<<<<<<<<< + * """Return a new view of the dictionary's items *(key, value) pairs).""" + * return _ItemsView.__new__(_ItemsView, self._impl) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_17items(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_5_Base_16items[] = "Return a new view of the dictionary's items *(key, value) pairs)."; +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_17items(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("items (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_16items(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_16items(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("items", 0); + + /* "multidict/_multidict.pyx":174 + * def items(self): + * """Return a new view of the dictionary's items *(key, value) pairs).""" + * return _ItemsView.__new__(_ItemsView, self._impl) # <<<<<<<<<<<<<< + * + * def values(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 174, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)__pyx_v_self->_impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self->_impl)); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_self->_impl)); + __pyx_t_2 = __pyx_tp_new_9multidict_10_multidict__ItemsView(((PyTypeObject *)__pyx_ptype_9multidict_10_multidict__ItemsView), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 174, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":172 + * return _KeysView.__new__(_KeysView, self._impl) + * + * def items(self): # <<<<<<<<<<<<<< + * """Return a new view of the dictionary's items *(key, value) pairs).""" + * return _ItemsView.__new__(_ItemsView, self._impl) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._Base.items", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":176 + * return _ItemsView.__new__(_ItemsView, self._impl) + * + * def values(self): # <<<<<<<<<<<<<< + * """Return a new view of the dictionary's values.""" + * return _ValuesView.__new__(_ValuesView, self._impl) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_19values(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_5_Base_18values[] = "Return a new view of the dictionary's values."; +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_19values(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("values (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_18values(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_18values(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("values", 0); + + /* "multidict/_multidict.pyx":178 + * def values(self): + * """Return a new view of the dictionary's values.""" + * return _ValuesView.__new__(_ValuesView, self._impl) # <<<<<<<<<<<<<< + * + * def __repr__(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 178, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)__pyx_v_self->_impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self->_impl)); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_self->_impl)); + __pyx_t_2 = __pyx_tp_new_9multidict_10_multidict__ValuesView(((PyTypeObject *)__pyx_ptype_9multidict_10_multidict__ValuesView), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 178, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":176 + * return _ItemsView.__new__(_ItemsView, self._impl) + * + * def values(self): # <<<<<<<<<<<<<< + * """Return a new view of the dictionary's values.""" + * return _ValuesView.__new__(_ValuesView, self._impl) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._Base.values", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":180 + * return _ValuesView.__new__(_ValuesView, self._impl) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * cdef _Pair item + * lst = [] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_21__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_21__repr__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_20__repr__(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_20__repr__(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_lst = NULL; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_v_body = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + __Pyx_RefNannySetupContext("__repr__", 0); + + /* "multidict/_multidict.pyx":182 + * def __repr__(self): + * cdef _Pair item + * lst = [] # <<<<<<<<<<<<<< + * for i in self._impl._items: + * item = <_Pair>i + */ + __pyx_t_1 = PyList_New(0); 
if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 182, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_lst = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":183 + * cdef _Pair item + * lst = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * lst.append("'{}': {!r}".format(item._key, item._value)) + */ + if (unlikely(__pyx_v_self->_impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 183, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->_impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 183, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 183, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":184 + * lst = [] + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * lst.append("'{}': {!r}".format(item._key, item._value)) + * body = ', '.join(lst) + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":185 + * for i in self._impl._items: + * item = <_Pair>i + * lst.append("'{}': {!r}".format(item._key, item._value)) # <<<<<<<<<<<<<< + * body = ', '.join(lst) + * return '<{}({})>'.format(self.__class__.__name__, body) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_r, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 185, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_item->_key, __pyx_v_item->_value}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 185, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_item->_key, __pyx_v_item->_value}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 185, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 185, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_item->_key); + __Pyx_GIVEREF(__pyx_v_item->_key); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, __pyx_v_item->_key); + __Pyx_INCREF(__pyx_v_item->_value); + __Pyx_GIVEREF(__pyx_v_item->_value); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, 
__pyx_v_item->_value); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 185, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_lst, __pyx_t_3); if (unlikely(__pyx_t_8 == -1)) __PYX_ERR(0, 185, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":183 + * cdef _Pair item + * lst = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * lst.append("'{}': {!r}".format(item._key, item._value)) + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":186 + * item = <_Pair>i + * lst.append("'{}': {!r}".format(item._key, item._value)) + * body = ', '.join(lst) # <<<<<<<<<<<<<< + * return '<{}({})>'.format(self.__class__.__name__, body) + * + */ + __pyx_t_1 = __Pyx_PyString_Join(__pyx_kp_s__3, __pyx_v_lst); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 186, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_body = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":187 + * lst.append("'{}': {!r}".format(item._key, item._value)) + * body = ', '.join(lst) + * return '<{}({})>'.format(self.__class__.__name__, body) # <<<<<<<<<<<<<< + * + * cdef _eq_to_mapping(self, other): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s__4, __pyx_n_s_format); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_name); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_7, __pyx_v_body}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_7, __pyx_v_body}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_6, __pyx_t_7); + __Pyx_INCREF(__pyx_v_body); + __Pyx_GIVEREF(__pyx_v_body); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_6, __pyx_v_body); + 
__pyx_t_7 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 187, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":180 + * return _ValuesView.__new__(_ValuesView, self._impl) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * cdef _Pair item + * lst = [] + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("multidict._multidict._Base.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_lst); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_body); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":189 + * return '<{}({})>'.format(self.__class__.__name__, body) + * + * cdef _eq_to_mapping(self, other): # <<<<<<<<<<<<<< + * cdef _Pair item + * if len(self._impl._items) != len(other): + */ + +static PyObject *__pyx_f_9multidict_10_multidict_5_Base__eq_to_mapping(struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_self, PyObject *__pyx_v_other) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_v_k = NULL; + PyObject *__pyx_v_v = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + Py_ssize_t __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *(*__pyx_t_8)(PyObject *); + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *(*__pyx_t_11)(PyObject *); + int __pyx_t_12; + __Pyx_RefNannySetupContext("_eq_to_mapping", 0); + + /* "multidict/_multidict.pyx":191 + * cdef _eq_to_mapping(self, other): + * cdef _Pair item + * if len(self._impl._items) != len(other): # <<<<<<<<<<<<<< + * return False + * for i in self._impl._items: + */ + __pyx_t_1 = __pyx_v_self->_impl->_items; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 191, __pyx_L1_error) + } + __pyx_t_2 = PyList_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 191, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 191, __pyx_L1_error) + __pyx_t_4 = ((__pyx_t_2 != __pyx_t_3) != 0); + if (__pyx_t_4) { + + /* "multidict/_multidict.pyx":192 + * cdef _Pair item + * if len(self._impl._items) != len(other): + * return False # <<<<<<<<<<<<<< + * for i in self._impl._items: + * item = <_Pair>i + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":191 + * cdef _eq_to_mapping(self, other): + * cdef _Pair item + * if len(self._impl._items) != len(other): # <<<<<<<<<<<<<< + * return False + * for i in self._impl._items: + */ + } + + /* "multidict/_multidict.pyx":193 + * if len(self._impl._items) != len(other): + * return False + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * for k, v in other.items(): + */ + if (unlikely(__pyx_v_self->_impl->_items == Py_None)) { + 
PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 193, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->_impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_3 = 0; + for (;;) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_5 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_3); __Pyx_INCREF(__pyx_t_5); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 193, __pyx_L1_error) + #else + __pyx_t_5 = PySequence_ITEM(__pyx_t_1, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 193, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_5); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":194 + * return False + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * for k, v in other.items(): + * if self._title(k) != item._identity: + */ + __pyx_t_5 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_5); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_5)); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":195 + * for i in self._impl._items: + * item = <_Pair>i + * for k, v in other.items(): # <<<<<<<<<<<<<< + * if self._title(k) != item._identity: + * continue + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_other, __pyx_n_s_items); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (__pyx_t_7) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_7); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else { + __pyx_t_5 = __Pyx_PyObject_CallNoArg(__pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 195, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (likely(PyList_CheckExact(__pyx_t_5)) || PyTuple_CheckExact(__pyx_t_5)) { + __pyx_t_6 = __pyx_t_5; __Pyx_INCREF(__pyx_t_6); __pyx_t_2 = 0; + __pyx_t_8 = NULL; + } else { + __pyx_t_2 = -1; __pyx_t_6 = PyObject_GetIter(__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = Py_TYPE(__pyx_t_6)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 195, __pyx_L1_error) + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + for (;;) { + if (likely(!__pyx_t_8)) { + if (likely(PyList_CheckExact(__pyx_t_6))) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_6)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_5 = PyList_GET_ITEM(__pyx_t_6, __pyx_t_2); __Pyx_INCREF(__pyx_t_5); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 195, __pyx_L1_error) + #else + __pyx_t_5 = PySequence_ITEM(__pyx_t_6, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + } else { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_6)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_2); __Pyx_INCREF(__pyx_t_5); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 195, __pyx_L1_error) + #else + __pyx_t_5 = PySequence_ITEM(__pyx_t_6, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + 
#endif + } + } else { + __pyx_t_5 = __pyx_t_8(__pyx_t_6); + if (unlikely(!__pyx_t_5)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 195, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_5); + } + if ((likely(PyTuple_CheckExact(__pyx_t_5))) || (PyList_CheckExact(__pyx_t_5))) { + PyObject* sequence = __pyx_t_5; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 195, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_9 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_7 = PyList_GET_ITEM(sequence, 0); + __pyx_t_9 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_9); + #else + __pyx_t_7 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + #endif + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_10 = PyObject_GetIter(__pyx_t_5); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 195, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_11 = Py_TYPE(__pyx_t_10)->tp_iternext; + index = 0; __pyx_t_7 = __pyx_t_11(__pyx_t_10); if (unlikely(!__pyx_t_7)) goto __pyx_L8_unpacking_failed; + __Pyx_GOTREF(__pyx_t_7); + index = 1; __pyx_t_9 = __pyx_t_11(__pyx_t_10); if (unlikely(!__pyx_t_9)) goto __pyx_L8_unpacking_failed; + __Pyx_GOTREF(__pyx_t_9); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_10), 2) < 0) __PYX_ERR(0, 195, __pyx_L1_error) + __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + goto __pyx_L9_unpacking_done; + __pyx_L8_unpacking_failed:; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_11 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 195, __pyx_L1_error) + __pyx_L9_unpacking_done:; + } + __Pyx_XDECREF_SET(__pyx_v_k, __pyx_t_7); + __pyx_t_7 = 0; + __Pyx_XDECREF_SET(__pyx_v_v, __pyx_t_9); + __pyx_t_9 = 0; + + /* "multidict/_multidict.pyx":196 + * item = <_Pair>i + * for k, v in other.items(): + * if self._title(k) != item._identity: # <<<<<<<<<<<<<< + * continue + * if v == item._value: + */ + __pyx_t_5 = ((struct __pyx_vtabstruct_9multidict_10_multidict__Base *)__pyx_v_self->__pyx_vtab)->_title(__pyx_v_self, __pyx_v_k); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = (__Pyx_PyString_Equals(__pyx_t_5, __pyx_v_item->_identity, Py_NE)); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 196, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_12 = (__pyx_t_4 != 0); + if (__pyx_t_12) { + + /* "multidict/_multidict.pyx":197 + * for k, v in other.items(): + * if self._title(k) != item._identity: + * continue # <<<<<<<<<<<<<< + * if v == item._value: + * break + */ + goto __pyx_L6_continue; + + /* "multidict/_multidict.pyx":196 + * item = <_Pair>i + * for k, v in other.items(): + * if self._title(k) != item._identity: # <<<<<<<<<<<<<< + * 
continue + * if v == item._value: + */ + } + + /* "multidict/_multidict.pyx":198 + * if self._title(k) != item._identity: + * continue + * if v == item._value: # <<<<<<<<<<<<<< + * break + * else: + */ + __pyx_t_5 = PyObject_RichCompare(__pyx_v_v, __pyx_v_item->_value, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 198, __pyx_L1_error) + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 198, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_12) { + + /* "multidict/_multidict.pyx":199 + * continue + * if v == item._value: + * break # <<<<<<<<<<<<<< + * else: + * return False + */ + goto __pyx_L7_break; + + /* "multidict/_multidict.pyx":198 + * if self._title(k) != item._identity: + * continue + * if v == item._value: # <<<<<<<<<<<<<< + * break + * else: + */ + } + + /* "multidict/_multidict.pyx":195 + * for i in self._impl._items: + * item = <_Pair>i + * for k, v in other.items(): # <<<<<<<<<<<<<< + * if self._title(k) != item._identity: + * continue + */ + __pyx_L6_continue:; + } + /*else*/ { + + /* "multidict/_multidict.pyx":201 + * break + * else: + * return False # <<<<<<<<<<<<<< + * return True + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L0; + } + + /* "multidict/_multidict.pyx":195 + * for i in self._impl._items: + * item = <_Pair>i + * for k, v in other.items(): # <<<<<<<<<<<<<< + * if self._title(k) != item._identity: + * continue + */ + __pyx_L7_break:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "multidict/_multidict.pyx":193 + * if len(self._impl._items) != len(other): + * return False + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * for k, v in other.items(): + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":202 + * else: + * return False + * return True # <<<<<<<<<<<<<< + * + * def __richcmp__(self, other, op): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":189 + * return '<{}({})>'.format(self.__class__.__name__, body) + * + * cdef _eq_to_mapping(self, other): # <<<<<<<<<<<<<< + * cdef _Pair item + * if len(self._impl._items) != len(other): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_AddTraceback("multidict._multidict._Base._eq_to_mapping", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_k); + __Pyx_XDECREF(__pyx_v_v); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":204 + * return True + * + * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< + * if op == 2: # == + * return _eq(self, other) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_23__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_5_Base_23__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op) { + PyObject *__pyx_v_op = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__richcmp__ (wrapper)", 0); + 
__pyx_v_op = __Pyx_PyInt_From_int(__pyx_arg_op); if (unlikely(!__pyx_v_op)) __PYX_ERR(0, 204, __pyx_L3_error) + __Pyx_GOTREF(__pyx_v_op); + goto __pyx_L4_argument_unpacking_done; + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._Base.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_5_Base_22__richcmp__(((PyObject *)__pyx_v_self), ((PyObject *)__pyx_v_other), ((PyObject *)__pyx_v_op)); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_op); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_5_Base_22__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op) { + PyObject *__pyx_v_ret = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + __Pyx_RefNannySetupContext("__richcmp__", 0); + + /* "multidict/_multidict.pyx":205 + * + * def __richcmp__(self, other, op): + * if op == 2: # == # <<<<<<<<<<<<<< + * return _eq(self, other) + * elif op == 3: # != + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 205, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 205, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":206 + * def __richcmp__(self, other, op): + * if op == 2: # == + * return _eq(self, other) # <<<<<<<<<<<<<< + * elif op == 3: # != + * ret = _eq(self, other) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_9multidict_10_multidict__eq(__pyx_v_self, __pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 206, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":205 + * + * def __richcmp__(self, other, op): + * if op == 2: # == # <<<<<<<<<<<<<< + * return _eq(self, other) + * elif op == 3: # != + */ + } + + /* "multidict/_multidict.pyx":207 + * if op == 2: # == + * return _eq(self, other) + * elif op == 3: # != # <<<<<<<<<<<<<< + * ret = _eq(self, other) + * if ret is NotImplemented: + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_3, 3, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 207, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 207, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":208 + * return _eq(self, other) + * elif op == 3: # != + * ret = _eq(self, other) # <<<<<<<<<<<<<< + * if ret is NotImplemented: + * return ret + */ + __pyx_t_1 = __pyx_f_9multidict_10_multidict__eq(__pyx_v_self, __pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 208, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_ret = __pyx_t_1; + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":209 + * elif op == 3: # != + * ret = _eq(self, other) + * if ret is NotImplemented: # <<<<<<<<<<<<<< + * return ret + * else: + */ + __pyx_t_2 = (__pyx_v_ret == __pyx_builtin_NotImplemented); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":210 + * ret = _eq(self, other) + * if ret is NotImplemented: + * return ret # <<<<<<<<<<<<<< + * else: + * return not ret + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_ret); + __pyx_r = __pyx_v_ret; + 
goto __pyx_L0; + + /* "multidict/_multidict.pyx":209 + * elif op == 3: # != + * ret = _eq(self, other) + * if ret is NotImplemented: # <<<<<<<<<<<<<< + * return ret + * else: + */ + } + + /* "multidict/_multidict.pyx":212 + * return ret + * else: + * return not ret # <<<<<<<<<<<<<< + * else: + * return NotImplemented + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_ret); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 212, __pyx_L1_error) + __pyx_t_1 = __Pyx_PyBool_FromLong((!__pyx_t_3)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 212, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + } + + /* "multidict/_multidict.pyx":207 + * if op == 2: # == + * return _eq(self, other) + * elif op == 3: # != # <<<<<<<<<<<<<< + * ret = _eq(self, other) + * if ret is NotImplemented: + */ + } + + /* "multidict/_multidict.pyx":214 + * return not ret + * else: + * return NotImplemented # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + } + + /* "multidict/_multidict.pyx":204 + * return True + * + * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< + * if op == 2: # == + * return _eq(self, other) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict._Base.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":221 + * _base_class = MultiDict + * + * def __init__(self, arg): # <<<<<<<<<<<<<< + * cdef _Base base + * if not isinstance(arg, self._proxy_classes): + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_14MultiDictProxy_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_14MultiDictProxy_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_arg = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_arg,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 221, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_arg = values[0]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 221, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict.MultiDictProxy.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + 
__pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_14MultiDictProxy___init__(((struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *)__pyx_v_self), __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_14MultiDictProxy___init__(struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *__pyx_v_self, PyObject *__pyx_v_arg) { + struct __pyx_obj_9multidict_10_multidict__Base *__pyx_v_base = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "multidict/_multidict.pyx":223 + * def __init__(self, arg): + * cdef _Base base + * if not isinstance(arg, self._proxy_classes): # <<<<<<<<<<<<<< + * raise TypeError( + * 'ctor requires {} instance' + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_proxy_classes); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 223, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_IsInstance(__pyx_v_arg, __pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 223, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":226 + * raise TypeError( + * 'ctor requires {} instance' + * ', not {}'.format( # <<<<<<<<<<<<<< + * ' or '.join(self._proxy_classes), + * type(arg))) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_ctor_requires_instance_not, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + + /* "multidict/_multidict.pyx":227 + * 'ctor requires {} instance' + * ', not {}'.format( + * ' or '.join(self._proxy_classes), # <<<<<<<<<<<<<< + * type(arg))) + * + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_proxy_classes); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyString_Join(__pyx_kp_s_or, __pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 227, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":228 + * ', not {}'.format( + * ' or '.join(self._proxy_classes), + * type(arg))) # <<<<<<<<<<<<<< + * + * base = arg + */ + __pyx_t_5 = NULL; + __pyx_t_7 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_7 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_6, ((PyObject *)Py_TYPE(__pyx_v_arg))}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_6, ((PyObject *)Py_TYPE(__pyx_v_arg))}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, __pyx_t_6); + __Pyx_INCREF(((PyObject *)Py_TYPE(__pyx_v_arg))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(__pyx_v_arg))); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, ((PyObject *)Py_TYPE(__pyx_v_arg))); + __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":224 + * cdef _Base base + * if not isinstance(arg, self._proxy_classes): + * raise TypeError( # <<<<<<<<<<<<<< + * 'ctor requires {} instance' + * ', not {}'.format( + */ + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 224, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 224, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 224, __pyx_L1_error) + + /* "multidict/_multidict.pyx":223 + * def __init__(self, arg): + * cdef _Base base + * if not isinstance(arg, self._proxy_classes): # <<<<<<<<<<<<<< + * raise TypeError( + * 'ctor requires {} instance' + */ + } + + /* "multidict/_multidict.pyx":230 + * type(arg))) + * + * base = arg # <<<<<<<<<<<<<< + * self._impl = base._impl + * + */ + if (!(likely(((__pyx_v_arg) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_arg, __pyx_ptype_9multidict_10_multidict__Base))))) __PYX_ERR(0, 230, __pyx_L1_error) + __pyx_t_1 = __pyx_v_arg; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_base = ((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":231 + * + * base = arg + * self._impl = base._impl # <<<<<<<<<<<<<< + * + * def __reduce__(self): + */ + __pyx_t_1 = ((PyObject *)__pyx_v_base->_impl); + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->__pyx_base._impl); + __Pyx_DECREF(((PyObject *)__pyx_v_self->__pyx_base._impl)); + __pyx_v_self->__pyx_base._impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":221 + * _base_class = MultiDict + * + * def __init__(self, arg): # <<<<<<<<<<<<<< + * cdef _Base base + * if not isinstance(arg, self._proxy_classes): + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("multidict._multidict.MultiDictProxy.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_base); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":233 + * self._impl = base._impl + * + * def __reduce__(self): # 
<<<<<<<<<<<<<< + * raise TypeError("can't pickle {} objects".format(self.__class__.__name__)) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_14MultiDictProxy_3__reduce__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_14MultiDictProxy_3__reduce__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_14MultiDictProxy_2__reduce__(((struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_14MultiDictProxy_2__reduce__(struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("__reduce__", 0); + + /* "multidict/_multidict.pyx":234 + * + * def __reduce__(self): + * raise TypeError("can't pickle {} objects".format(self.__class__.__name__)) # <<<<<<<<<<<<<< + * + * def copy(self): + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_can_t_pickle_objects, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_name); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_3) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_t_4}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_t_4}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_1 = 
__Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_t_2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 234, __pyx_L1_error) + + /* "multidict/_multidict.pyx":233 + * self._impl = base._impl + * + * def __reduce__(self): # <<<<<<<<<<<<<< + * raise TypeError("can't pickle {} objects".format(self.__class__.__name__)) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("multidict._multidict.MultiDictProxy.__reduce__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":236 + * raise TypeError("can't pickle {} objects".format(self.__class__.__name__)) + * + * def copy(self): # <<<<<<<<<<<<<< + * """Return a copy of itself.""" + * return self._base_class(self) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_14MultiDictProxy_5copy(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_14MultiDictProxy_4copy[] = "Return a copy of itself."; +static PyObject *__pyx_pw_9multidict_10_multidict_14MultiDictProxy_5copy(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("copy (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_14MultiDictProxy_4copy(((struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_14MultiDictProxy_4copy(struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("copy", 0); + + /* "multidict/_multidict.pyx":238 + * def copy(self): + * """Return a copy of itself.""" + * return self._base_class(self) # <<<<<<<<<<<<<< + * + * abc.Mapping.register(MultiDictProxy) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_base_class); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 238, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_3) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, ((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 238, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 238, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 238, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 238, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, ((PyObject *)__pyx_v_self)); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 238, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":236 + * raise TypeError("can't pickle {} objects".format(self.__class__.__name__)) + * + * def copy(self): # <<<<<<<<<<<<<< + * """Return a copy of itself.""" + * return self._base_class(self) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict.MultiDictProxy.copy", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":247 + * _base_class = CIMultiDict + * + * cdef str _title(self, s): # <<<<<<<<<<<<<< + * typ = type(s) + * if typ is str: + */ + +static PyObject *__pyx_f_9multidict_10_multidict_16CIMultiDictProxy__title(CYTHON_UNUSED struct __pyx_obj_9multidict_10_multidict_CIMultiDictProxy *__pyx_v_self, PyObject *__pyx_v_s) { + PyTypeObject *__pyx_v_typ = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("_title", 0); + + /* "multidict/_multidict.pyx":248 + * + * cdef str _title(self, s): + * typ = type(s) # <<<<<<<<<<<<<< + * if typ is str: + * return (s.title()) + */ + __Pyx_INCREF(((PyObject *)Py_TYPE(__pyx_v_s))); + __pyx_v_typ = ((PyTypeObject*)((PyObject *)Py_TYPE(__pyx_v_s))); + + /* "multidict/_multidict.pyx":249 + * cdef str _title(self, s): + * typ = type(s) + * if typ is str: # <<<<<<<<<<<<<< + * return (s.title()) + * elif type(s) is _istr: + */ + __pyx_t_1 = (__pyx_v_typ == (&PyString_Type)); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":250 + * typ = type(s) + * if typ is str: + * return (s.title()) # <<<<<<<<<<<<<< + * elif type(s) is _istr: + * return PyObject_Str(s) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_s, __pyx_n_s_title); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 250, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 250, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else { + __pyx_t_3 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 250, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_INCREF(((PyObject*)__pyx_t_3)); + __pyx_r = ((PyObject*)__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":249 + * cdef str _title(self, s): + * typ = type(s) + * if typ is str: # <<<<<<<<<<<<<< + * return (s.title()) + * elif type(s) is _istr: + */ + } + + /* "multidict/_multidict.pyx":251 + * if typ is str: + * return (s.title()) + * elif type(s) is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(s) + * return s.title() + */ + __pyx_t_2 = (((PyObject *)Py_TYPE(__pyx_v_s)) == __pyx_v_9multidict_10_multidict__istr); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":252 + * return (s.title()) + * elif type(s) is _istr: + * return PyObject_Str(s) # <<<<<<<<<<<<<< + * return s.title() + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyObject_Str(__pyx_v_s); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 252, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 252, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":251 + * if typ is str: + * return (s.title()) + * elif type(s) is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(s) + * return s.title() + */ + } + + /* "multidict/_multidict.pyx":253 + * elif type(s) is _istr: + * return PyObject_Str(s) + * return s.title() # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_s, __pyx_n_s_title); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 253, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 253, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else { + __pyx_t_3 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 253, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 253, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":247 + * _base_class = CIMultiDict + * + * cdef str _title(self, s): # <<<<<<<<<<<<<< 
+ * typ = type(s) + * if typ is str: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("multidict._multidict.CIMultiDictProxy._title", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_typ); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":259 + * + * + * cdef str _str(key): # <<<<<<<<<<<<<< + * typ = type(key) + * if typ is str: + */ + +static PyObject *__pyx_f_9multidict_10_multidict__str(PyObject *__pyx_v_key) { + PyTypeObject *__pyx_v_typ = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("_str", 0); + + /* "multidict/_multidict.pyx":260 + * + * cdef str _str(key): + * typ = type(key) # <<<<<<<<<<<<<< + * if typ is str: + * return key + */ + __Pyx_INCREF(((PyObject *)Py_TYPE(__pyx_v_key))); + __pyx_v_typ = ((PyTypeObject*)((PyObject *)Py_TYPE(__pyx_v_key))); + + /* "multidict/_multidict.pyx":261 + * cdef str _str(key): + * typ = type(key) + * if typ is str: # <<<<<<<<<<<<<< + * return key + * if typ is _istr: + */ + __pyx_t_1 = (__pyx_v_typ == (&PyString_Type)); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":262 + * typ = type(key) + * if typ is str: + * return key # <<<<<<<<<<<<<< + * if typ is _istr: + * return PyObject_Str(key) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject*)__pyx_v_key)); + __pyx_r = ((PyObject*)__pyx_v_key); + goto __pyx_L0; + + /* "multidict/_multidict.pyx":261 + * cdef str _str(key): + * typ = type(key) + * if typ is str: # <<<<<<<<<<<<<< + * return key + * if typ is _istr: + */ + } + + /* "multidict/_multidict.pyx":263 + * if typ is str: + * return key + * if typ is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(key) + * elif issubclass(typ, str): + */ + __pyx_t_2 = (__pyx_v_typ == ((PyTypeObject*)__pyx_v_9multidict_10_multidict__istr)); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":264 + * return key + * if typ is _istr: + * return PyObject_Str(key) # <<<<<<<<<<<<<< + * elif issubclass(typ, str): + * return str(key) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyObject_Str(__pyx_v_key); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 264, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 264, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":263 + * if typ is str: + * return key + * if typ is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(key) + * elif issubclass(typ, str): + */ + } + + /* "multidict/_multidict.pyx":265 + * if typ is _istr: + * return PyObject_Str(key) + * elif issubclass(typ, str): # <<<<<<<<<<<<<< + * return str(key) + * else: + */ + __pyx_t_1 = PyObject_IsSubclass(((PyObject *)__pyx_v_typ), ((PyObject *)(&PyString_Type))); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 265, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":266 + * return PyObject_Str(key) + * elif issubclass(typ, str): + * return str(key) # <<<<<<<<<<<<<< + * else: + * raise TypeError("MultiDict keys should be either str " + */ + 
__Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_key); + __Pyx_GIVEREF(__pyx_v_key); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_key); + __pyx_t_4 = __Pyx_PyObject_Call(((PyObject *)(&PyString_Type)), __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 266, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":265 + * if typ is _istr: + * return PyObject_Str(key) + * elif issubclass(typ, str): # <<<<<<<<<<<<<< + * return str(key) + * else: + */ + } + + /* "multidict/_multidict.pyx":268 + * return str(key) + * else: + * raise TypeError("MultiDict keys should be either str " # <<<<<<<<<<<<<< + * "or subclasses of str") + * + */ + /*else*/ { + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 268, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(0, 268, __pyx_L1_error) + } + + /* "multidict/_multidict.pyx":259 + * + * + * cdef str _str(key): # <<<<<<<<<<<<<< + * typ = type(key) + * if typ is str: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._str", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_typ); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":275 + * """An ordered dictionary that can have multiple values for each key.""" + * + * def __init__(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._impl = _Impl() + * self._extend(args, kwargs, 'MultiDict', True) + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_9MultiDict_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_9MultiDict_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_kwargs = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__init__", 1))) return -1; + __pyx_v_kwargs = (__pyx_kwds) ? 
PyDict_Copy(__pyx_kwds) : PyDict_New(); if (unlikely(!__pyx_v_kwargs)) return -1; + __Pyx_GOTREF(__pyx_v_kwargs); + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict___init__(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_9MultiDict___init__(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "multidict/_multidict.pyx":276 + * + * def __init__(self, *args, **kwargs): + * self._impl = _Impl() # <<<<<<<<<<<<<< + * self._extend(args, kwargs, 'MultiDict', True) + * + */ + __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_9multidict_10_multidict__Impl), __pyx_empty_tuple, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 276, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->__pyx_base._impl); + __Pyx_DECREF(((PyObject *)__pyx_v_self->__pyx_base._impl)); + __pyx_v_self->__pyx_base._impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":277 + * def __init__(self, *args, **kwargs): + * self._impl = _Impl() + * self._extend(args, kwargs, 'MultiDict', True) # <<<<<<<<<<<<<< + * + * def __reduce__(self): + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_extend(__pyx_v_self, __pyx_v_args, __pyx_v_kwargs, __pyx_n_s_MultiDict, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 277, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":275 + * """An ordered dictionary that can have multiple values for each key.""" + * + * def __init__(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._impl = _Impl() + * self._extend(args, kwargs, 'MultiDict', True) + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict.MultiDict.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":279 + * self._extend(args, kwargs, 'MultiDict', True) + * + * def __reduce__(self): # <<<<<<<<<<<<<< + * return ( + * self.__class__, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_3__reduce__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_3__reduce__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_2__reduce__(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_2__reduce__(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + 
PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("__reduce__", 0); + + /* "multidict/_multidict.pyx":280 + * + * def __reduce__(self): + * return ( # <<<<<<<<<<<<<< + * self.__class__, + * tuple(self.items()), + */ + __Pyx_XDECREF(__pyx_r); + + /* "multidict/_multidict.pyx":281 + * def __reduce__(self): + * return ( + * self.__class__, # <<<<<<<<<<<<<< + * tuple(self.items()), + * ) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 281, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "multidict/_multidict.pyx":282 + * return ( + * self.__class__, + * tuple(self.items()), # <<<<<<<<<<<<<< + * ) + * + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_items); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (__pyx_t_4) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 282, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else { + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 282, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PySequence_Tuple(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 282, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":281 + * def __reduce__(self): + * return ( + * self.__class__, # <<<<<<<<<<<<<< + * tuple(self.items()), + * ) + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 281, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_3); + __pyx_t_1 = 0; + __pyx_t_3 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":279 + * self._extend(args, kwargs, 'MultiDict', True) + * + * def __reduce__(self): # <<<<<<<<<<<<<< + * return ( + * self.__class__, + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict.MultiDict.__reduce__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":285 + * ) + * + * cdef _extend(self, tuple args, dict kwargs, name, bint do_add): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef object key + */ + +static PyObject *__pyx_f_9multidict_10_multidict_9MultiDict__extend(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs, PyObject *__pyx_v_name, int __pyx_v_do_add) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_arg = NULL; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_v_value = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + Py_ssize_t __pyx_t_1; + int 
__pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_t_10; + PyObject *(*__pyx_t_11)(PyObject *); + Py_ssize_t __pyx_t_12; + PyObject *(*__pyx_t_13)(PyObject *); + __Pyx_RefNannySetupContext("_extend", 0); + + /* "multidict/_multidict.pyx":289 + * cdef object key + * + * if len(args) > 1: # <<<<<<<<<<<<<< + * raise TypeError("{} takes at most 1 positional argument" + * " ({} given)".format(name, len(args))) + */ + if (unlikely(__pyx_v_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 289, __pyx_L1_error) + } + __pyx_t_1 = PyTuple_GET_SIZE(__pyx_v_args); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 289, __pyx_L1_error) + __pyx_t_2 = ((__pyx_t_1 > 1) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":291 + * if len(args) > 1: + * raise TypeError("{} takes at most 1 positional argument" + * " ({} given)".format(name, len(args))) # <<<<<<<<<<<<<< + * + * if args: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_takes_at_most_1_positional_argu, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + if (unlikely(__pyx_v_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 291, __pyx_L1_error) + } + __pyx_t_1 = PyTuple_GET_SIZE(__pyx_v_args); if (unlikely(__pyx_t_1 == -1)) __PYX_ERR(0, 291, __pyx_L1_error) + __pyx_t_5 = PyInt_FromSsize_t(__pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + __pyx_t_7 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_7 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_name, __pyx_t_5}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_name, __pyx_t_5}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_name); + __Pyx_GIVEREF(__pyx_v_name); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, __pyx_v_name); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, __pyx_t_5); + __pyx_t_5 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_8, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 291, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + 
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":290 + * + * if len(args) > 1: + * raise TypeError("{} takes at most 1 positional argument" # <<<<<<<<<<<<<< + * " ({} given)".format(name, len(args))) + * + */ + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 290, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 290, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(0, 290, __pyx_L1_error) + + /* "multidict/_multidict.pyx":289 + * cdef object key + * + * if len(args) > 1: # <<<<<<<<<<<<<< + * raise TypeError("{} takes at most 1 positional argument" + * " ({} given)".format(name, len(args))) + */ + } + + /* "multidict/_multidict.pyx":293 + * " ({} given)".format(name, len(args))) + * + * if args: # <<<<<<<<<<<<<< + * arg = args[0] + * if isinstance(arg, CIMultiDict): + */ + __pyx_t_2 = (__pyx_v_args != Py_None) && (PyTuple_GET_SIZE(__pyx_v_args) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":294 + * + * if args: + * arg = args[0] # <<<<<<<<<<<<<< + * if isinstance(arg, CIMultiDict): + * self._impl._items.extend((<_Base>arg)._impl._items) + */ + if (unlikely(__pyx_v_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 294, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 294, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_v_arg = __pyx_t_3; + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":295 + * if args: + * arg = args[0] + * if isinstance(arg, CIMultiDict): # <<<<<<<<<<<<<< + * self._impl._items.extend((<_Base>arg)._impl._items) + * elif isinstance(arg, _Base): + */ + __pyx_t_2 = __Pyx_TypeCheck(__pyx_v_arg, __pyx_ptype_9multidict_10_multidict_CIMultiDict); + __pyx_t_9 = (__pyx_t_2 != 0); + if (__pyx_t_9) { + + /* "multidict/_multidict.pyx":296 + * arg = args[0] + * if isinstance(arg, CIMultiDict): + * self._impl._items.extend((<_Base>arg)._impl._items) # <<<<<<<<<<<<<< + * elif isinstance(arg, _Base): + * for i in (<_Base>arg)._impl._items: + */ + if (unlikely(__pyx_v_self->__pyx_base._impl->_items == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "extend"); + __PYX_ERR(0, 296, __pyx_L1_error) + } + __pyx_t_3 = ((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_arg)->_impl->_items; + __Pyx_INCREF(__pyx_t_3); + __pyx_t_10 = __Pyx_PyList_Extend(__pyx_v_self->__pyx_base._impl->_items, __pyx_t_3); if (unlikely(__pyx_t_10 == -1)) __PYX_ERR(0, 296, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":295 + * if args: + * arg = args[0] + * if isinstance(arg, CIMultiDict): # <<<<<<<<<<<<<< + * self._impl._items.extend((<_Base>arg)._impl._items) + * elif isinstance(arg, _Base): + */ + goto __pyx_L5; + } + + /* "multidict/_multidict.pyx":297 + * if isinstance(arg, CIMultiDict): + * self._impl._items.extend((<_Base>arg)._impl._items) + * elif isinstance(arg, _Base): # <<<<<<<<<<<<<< + * for i in (<_Base>arg)._impl._items: + * item = <_Pair>i + */ + __pyx_t_9 = __Pyx_TypeCheck(__pyx_v_arg, __pyx_ptype_9multidict_10_multidict__Base); + __pyx_t_2 = (__pyx_t_9 != 0); + if 
(__pyx_t_2) { + + /* "multidict/_multidict.pyx":298 + * self._impl._items.extend((<_Base>arg)._impl._items) + * elif isinstance(arg, _Base): + * for i in (<_Base>arg)._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * key = item._key + */ + if (unlikely(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_arg)->_impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 298, __pyx_L1_error) + } + __pyx_t_3 = ((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_arg)->_impl->_items; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0; + for (;;) { + if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_1); __Pyx_INCREF(__pyx_t_4); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 298, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 298, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":299 + * elif isinstance(arg, _Base): + * for i in (<_Base>arg)._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * key = item._key + * value = item._value + */ + __pyx_t_4 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":300 + * for i in (<_Base>arg)._impl._items: + * item = <_Pair>i + * key = item._key # <<<<<<<<<<<<<< + * value = item._value + * if do_add: + */ + __pyx_t_4 = __pyx_v_item->_key; + __Pyx_INCREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":301 + * item = <_Pair>i + * key = item._key + * value = item._value # <<<<<<<<<<<<<< + * if do_add: + * self._add(key, value) + */ + __pyx_t_4 = __pyx_v_item->_value; + __Pyx_INCREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_value, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":302 + * key = item._key + * value = item._value + * if do_add: # <<<<<<<<<<<<<< + * self._add(key, value) + * else: + */ + __pyx_t_2 = (__pyx_v_do_add != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":303 + * value = item._value + * if do_add: + * self._add(key, value) # <<<<<<<<<<<<<< + * else: + * self._replace(key, value) + */ + __pyx_t_4 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_add(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 303, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":302 + * key = item._key + * value = item._value + * if do_add: # <<<<<<<<<<<<<< + * self._add(key, value) + * else: + */ + goto __pyx_L8; + } + + /* "multidict/_multidict.pyx":305 + * self._add(key, value) + * else: + * self._replace(key, value) # <<<<<<<<<<<<<< + * elif hasattr(arg, 'items'): + * for i in arg.items(): + */ + /*else*/ { + __pyx_t_4 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_replace(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 305, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __pyx_L8:; + + /* "multidict/_multidict.pyx":298 + * self._impl._items.extend((<_Base>arg)._impl._items) + * elif isinstance(arg, _Base): + * for i in 
(<_Base>arg)._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * key = item._key + */ + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":297 + * if isinstance(arg, CIMultiDict): + * self._impl._items.extend((<_Base>arg)._impl._items) + * elif isinstance(arg, _Base): # <<<<<<<<<<<<<< + * for i in (<_Base>arg)._impl._items: + * item = <_Pair>i + */ + goto __pyx_L5; + } + + /* "multidict/_multidict.pyx":306 + * else: + * self._replace(key, value) + * elif hasattr(arg, 'items'): # <<<<<<<<<<<<<< + * for i in arg.items(): + * if isinstance(i, _Pair): + */ + __pyx_t_2 = PyObject_HasAttr(__pyx_v_arg, __pyx_n_s_items); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 306, __pyx_L1_error) + __pyx_t_9 = (__pyx_t_2 != 0); + if (__pyx_t_9) { + + /* "multidict/_multidict.pyx":307 + * self._replace(key, value) + * elif hasattr(arg, 'items'): + * for i in arg.items(): # <<<<<<<<<<<<<< + * if isinstance(i, _Pair): + * item = <_Pair>i + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_arg, __pyx_n_s_items); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_8) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else { + __pyx_t_3 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 307, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (likely(PyList_CheckExact(__pyx_t_3)) || PyTuple_CheckExact(__pyx_t_3)) { + __pyx_t_4 = __pyx_t_3; __Pyx_INCREF(__pyx_t_4); __pyx_t_1 = 0; + __pyx_t_11 = NULL; + } else { + __pyx_t_1 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_11 = Py_TYPE(__pyx_t_4)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 307, __pyx_L1_error) + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + for (;;) { + if (likely(!__pyx_t_11)) { + if (likely(PyList_CheckExact(__pyx_t_4))) { + if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_4)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_1); __Pyx_INCREF(__pyx_t_3); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 307, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } else { + if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_4)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_1); __Pyx_INCREF(__pyx_t_3); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 307, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 307, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } + } else { + __pyx_t_3 = __pyx_t_11(__pyx_t_4); + if (unlikely(!__pyx_t_3)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 307, 
__pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_3); + } + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":308 + * elif hasattr(arg, 'items'): + * for i in arg.items(): + * if isinstance(i, _Pair): # <<<<<<<<<<<<<< + * item = <_Pair>i + * key = item._key + */ + __pyx_t_9 = __Pyx_TypeCheck(__pyx_v_i, __pyx_ptype_9multidict_10_multidict__Pair); + __pyx_t_2 = (__pyx_t_9 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":309 + * for i in arg.items(): + * if isinstance(i, _Pair): + * item = <_Pair>i # <<<<<<<<<<<<<< + * key = item._key + * value = item._value + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":310 + * if isinstance(i, _Pair): + * item = <_Pair>i + * key = item._key # <<<<<<<<<<<<<< + * value = item._value + * else: + */ + __pyx_t_3 = __pyx_v_item->_key; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":311 + * item = <_Pair>i + * key = item._key + * value = item._value # <<<<<<<<<<<<<< + * else: + * key = i[0] + */ + __pyx_t_3 = __pyx_v_item->_value; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_value, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":308 + * elif hasattr(arg, 'items'): + * for i in arg.items(): + * if isinstance(i, _Pair): # <<<<<<<<<<<<<< + * item = <_Pair>i + * key = item._key + */ + goto __pyx_L11; + } + + /* "multidict/_multidict.pyx":313 + * value = item._value + * else: + * key = i[0] # <<<<<<<<<<<<<< + * value = i[1] + * if do_add: + */ + /*else*/ { + __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_i, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 313, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":314 + * else: + * key = i[0] + * value = i[1] # <<<<<<<<<<<<<< + * if do_add: + * self._add(key, value) + */ + __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_i, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 314, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_value, __pyx_t_3); + __pyx_t_3 = 0; + } + __pyx_L11:; + + /* "multidict/_multidict.pyx":315 + * key = i[0] + * value = i[1] + * if do_add: # <<<<<<<<<<<<<< + * self._add(key, value) + * else: + */ + __pyx_t_2 = (__pyx_v_do_add != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":316 + * value = i[1] + * if do_add: + * self._add(key, value) # <<<<<<<<<<<<<< + * else: + * self._replace(key, value) + */ + __pyx_t_3 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_add(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 316, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":315 + * key = i[0] + * value = i[1] + * if do_add: # <<<<<<<<<<<<<< + * self._add(key, value) + * else: + */ + goto __pyx_L12; + } + + /* "multidict/_multidict.pyx":318 + * self._add(key, value) + * else: + * self._replace(key, value) # <<<<<<<<<<<<<< + * else: + * for i in arg: + */ + /*else*/ { + __pyx_t_3 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_replace(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 318, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_L12:; + + /* "multidict/_multidict.pyx":307 + * self._replace(key, value) + * elif hasattr(arg, 'items'): + * for i in arg.items(): # <<<<<<<<<<<<<< + * if isinstance(i, _Pair): + * item = <_Pair>i + */ + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":306 + * else: + * self._replace(key, value) + * elif hasattr(arg, 'items'): # <<<<<<<<<<<<<< + * for i in arg.items(): + * if isinstance(i, _Pair): + */ + goto __pyx_L5; + } + + /* "multidict/_multidict.pyx":320 + * self._replace(key, value) + * else: + * for i in arg: # <<<<<<<<<<<<<< + * if isinstance(i, _Pair): + * item = <_Pair>i + */ + /*else*/ { + if (likely(PyList_CheckExact(__pyx_v_arg)) || PyTuple_CheckExact(__pyx_v_arg)) { + __pyx_t_4 = __pyx_v_arg; __Pyx_INCREF(__pyx_t_4); __pyx_t_1 = 0; + __pyx_t_11 = NULL; + } else { + __pyx_t_1 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_v_arg); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 320, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_11 = Py_TYPE(__pyx_t_4)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 320, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_11)) { + if (likely(PyList_CheckExact(__pyx_t_4))) { + if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_4)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_1); __Pyx_INCREF(__pyx_t_3); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 320, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 320, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } else { + if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_4)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_1); __Pyx_INCREF(__pyx_t_3); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 320, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_4, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 320, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } + } else { + __pyx_t_3 = __pyx_t_11(__pyx_t_4); + if (unlikely(!__pyx_t_3)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 320, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_3); + } + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":321 + * else: + * for i in arg: + * if isinstance(i, _Pair): # <<<<<<<<<<<<<< + * item = <_Pair>i + * key = item._key + */ + __pyx_t_2 = __Pyx_TypeCheck(__pyx_v_i, __pyx_ptype_9multidict_10_multidict__Pair); + __pyx_t_9 = (__pyx_t_2 != 0); + if (__pyx_t_9) { + + /* "multidict/_multidict.pyx":322 + * for i in arg: + * if isinstance(i, _Pair): + * item = <_Pair>i # <<<<<<<<<<<<<< + * key = item._key + * value = item._value + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":323 + * if isinstance(i, _Pair): + * item = <_Pair>i + * key = item._key # <<<<<<<<<<<<<< + * value = item._value + * else: + */ + __pyx_t_3 = __pyx_v_item->_key; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":324 + * item = <_Pair>i + * key = item._key + 
* value = item._value # <<<<<<<<<<<<<< + * else: + * if not len(i) == 2: + */ + __pyx_t_3 = __pyx_v_item->_value; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_value, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":321 + * else: + * for i in arg: + * if isinstance(i, _Pair): # <<<<<<<<<<<<<< + * item = <_Pair>i + * key = item._key + */ + goto __pyx_L15; + } + + /* "multidict/_multidict.pyx":326 + * value = item._value + * else: + * if not len(i) == 2: # <<<<<<<<<<<<<< + * raise TypeError( + * "{} takes either dict or list of (key, value) " + */ + /*else*/ { + __pyx_t_12 = PyObject_Length(__pyx_v_i); if (unlikely(__pyx_t_12 == -1)) __PYX_ERR(0, 326, __pyx_L1_error) + __pyx_t_9 = ((!((__pyx_t_12 == 2) != 0)) != 0); + if (__pyx_t_9) { + + /* "multidict/_multidict.pyx":329 + * raise TypeError( + * "{} takes either dict or list of (key, value) " + * "tuples".format(name)) # <<<<<<<<<<<<<< + * key = i[0] + * value = i[1] + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_takes_either_dict_or_list_of_ke, __pyx_n_s_format); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + if (!__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_v_name); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_name}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_name}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_INCREF(__pyx_v_name); + __Pyx_GIVEREF(__pyx_v_name); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_name); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 329, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "multidict/_multidict.pyx":327 + * else: + * if not len(i) == 2: + * raise TypeError( # <<<<<<<<<<<<<< + * "{} takes either dict or list of (key, value) " + * "tuples".format(name)) + */ + __pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 327, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_t_8, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 327, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(0, 327, __pyx_L1_error) + + /* "multidict/_multidict.pyx":326 + * value = item._value + * else: + * if not len(i) == 2: # <<<<<<<<<<<<<< + * raise TypeError( + * "{} takes either dict or list of (key, value) " + */ + } + + /* "multidict/_multidict.pyx":330 + * "{} takes either dict or list of (key, value) " + * "tuples".format(name)) + * key = i[0] # <<<<<<<<<<<<<< + * value = i[1] + * if do_add: + */ + __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_i, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 330, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":331 + * "tuples".format(name)) + * key = i[0] + * value = i[1] # <<<<<<<<<<<<<< + * if do_add: + * self._add(key, value) + */ + __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_i, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 331, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_value, __pyx_t_3); + __pyx_t_3 = 0; + } + __pyx_L15:; + + /* "multidict/_multidict.pyx":332 + * key = i[0] + * value = i[1] + * if do_add: # <<<<<<<<<<<<<< + * self._add(key, value) + * else: + */ + __pyx_t_9 = (__pyx_v_do_add != 0); + if (__pyx_t_9) { + + /* "multidict/_multidict.pyx":333 + * value = i[1] + * if do_add: + * self._add(key, value) # <<<<<<<<<<<<<< + * else: + * self._replace(key, value) + */ + __pyx_t_3 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_add(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 333, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":332 + * key = i[0] + * value = i[1] + * if do_add: # <<<<<<<<<<<<<< + * self._add(key, value) + * else: + */ + goto __pyx_L17; + } + + /* "multidict/_multidict.pyx":335 + * self._add(key, value) + * else: + * self._replace(key, value) # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __pyx_t_3 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_replace(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 335, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __pyx_L17:; + + /* "multidict/_multidict.pyx":320 + * self._replace(key, value) + * else: + * for i in arg: # <<<<<<<<<<<<<< + * if isinstance(i, _Pair): + * item = <_Pair>i + */ + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __pyx_L5:; + + /* "multidict/_multidict.pyx":293 + * " ({} given)".format(name, len(args))) + * + * if args: # <<<<<<<<<<<<<< + * arg = args[0] + * if isinstance(arg, CIMultiDict): + */ + } + + /* "multidict/_multidict.pyx":338 + * + * + * for key, value in kwargs.items(): # <<<<<<<<<<<<<< + * if do_add: + * self._add(key, value) + */ + if (unlikely(__pyx_v_kwargs == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "items"); + __PYX_ERR(0, 338, __pyx_L1_error) + } + __pyx_t_4 = __Pyx_PyDict_Items(__pyx_v_kwargs); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + if (likely(PyList_CheckExact(__pyx_t_4)) || PyTuple_CheckExact(__pyx_t_4)) { + __pyx_t_3 = __pyx_t_4; __Pyx_INCREF(__pyx_t_3); __pyx_t_1 = 0; + __pyx_t_11 = NULL; + } else { + __pyx_t_1 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 338, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_3); + __pyx_t_11 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 338, __pyx_L1_error) + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + for (;;) { + if (likely(!__pyx_t_11)) { + if (likely(PyList_CheckExact(__pyx_t_3))) { + if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_1); __Pyx_INCREF(__pyx_t_4); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 338, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_3)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_1); __Pyx_INCREF(__pyx_t_4); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 338, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } + } else { + __pyx_t_4 = __pyx_t_11(__pyx_t_3); + if (unlikely(!__pyx_t_4)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 338, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_4); + } + if ((likely(PyTuple_CheckExact(__pyx_t_4))) || (PyList_CheckExact(__pyx_t_4))) { + PyObject* sequence = __pyx_t_4; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 338, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_8 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_8 = PyList_GET_ITEM(sequence, 0); + __pyx_t_6 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(__pyx_t_6); + #else + __pyx_t_8 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 338, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_13 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_8 = __pyx_t_13(__pyx_t_5); if (unlikely(!__pyx_t_8)) goto __pyx_L20_unpacking_failed; + __Pyx_GOTREF(__pyx_t_8); + index = 1; __pyx_t_6 = __pyx_t_13(__pyx_t_5); if (unlikely(!__pyx_t_6)) goto __pyx_L20_unpacking_failed; + __Pyx_GOTREF(__pyx_t_6); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_13(__pyx_t_5), 2) < 0) __PYX_ERR(0, 338, __pyx_L1_error) + __pyx_t_13 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L21_unpacking_done; + __pyx_L20_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_13 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 338, __pyx_L1_error) + 
__pyx_L21_unpacking_done:; + } + __Pyx_XDECREF_SET(__pyx_v_key, __pyx_t_8); + __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_value, __pyx_t_6); + __pyx_t_6 = 0; + + /* "multidict/_multidict.pyx":339 + * + * for key, value in kwargs.items(): + * if do_add: # <<<<<<<<<<<<<< + * self._add(key, value) + * else: + */ + __pyx_t_9 = (__pyx_v_do_add != 0); + if (__pyx_t_9) { + + /* "multidict/_multidict.pyx":340 + * for key, value in kwargs.items(): + * if do_add: + * self._add(key, value) # <<<<<<<<<<<<<< + * else: + * self._replace(key, value) + */ + __pyx_t_4 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_add(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 340, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":339 + * + * for key, value in kwargs.items(): + * if do_add: # <<<<<<<<<<<<<< + * self._add(key, value) + * else: + */ + goto __pyx_L22; + } + + /* "multidict/_multidict.pyx":342 + * self._add(key, value) + * else: + * self._replace(key, value) # <<<<<<<<<<<<<< + * + * cdef _add(self, key, value): + */ + /*else*/ { + __pyx_t_4 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_replace(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 342, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __pyx_L22:; + + /* "multidict/_multidict.pyx":338 + * + * + * for key, value in kwargs.items(): # <<<<<<<<<<<<<< + * if do_add: + * self._add(key, value) + */ + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":285 + * ) + * + * cdef _extend(self, tuple args, dict kwargs, name, bint do_add): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef object key + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("multidict._multidict.MultiDict._extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_key); + __Pyx_XDECREF(__pyx_v_arg); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_value); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":344 + * self._replace(key, value) + * + * cdef _add(self, key, value): # <<<<<<<<<<<<<< + * self._impl._items.append(_Pair.__new__( + * _Pair, self._title(key), _str(key), value)) + */ + +static PyObject *__pyx_f_9multidict_10_multidict_9MultiDict__add(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + __Pyx_RefNannySetupContext("_add", 0); + + /* "multidict/_multidict.pyx":345 + * + * cdef _add(self, key, value): + * self._impl._items.append(_Pair.__new__( # <<<<<<<<<<<<<< + * _Pair, self._title(key), _str(key), value)) + * self._impl.incr_version() + */ + if (unlikely(__pyx_v_self->__pyx_base._impl->_items == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "append"); + __PYX_ERR(0, 345, __pyx_L1_error) + } + + /* 
"multidict/_multidict.pyx":346 + * cdef _add(self, key, value): + * self._impl._items.append(_Pair.__new__( + * _Pair, self._title(key), _str(key), value)) # <<<<<<<<<<<<<< + * self._impl.incr_version() + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._title(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 346, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __pyx_f_9multidict_10_multidict__str(__pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 346, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + + /* "multidict/_multidict.pyx":345 + * + * cdef _add(self, key, value): + * self._impl._items.append(_Pair.__new__( # <<<<<<<<<<<<<< + * _Pair, self._title(key), _str(key), value)) + * self._impl.incr_version() + */ + __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 345, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_v_value); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_t_2 = __pyx_tp_new_9multidict_10_multidict__Pair(((PyTypeObject *)__pyx_ptype_9multidict_10_multidict__Pair), __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 345, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_4 = __Pyx_PyList_Append(__pyx_v_self->__pyx_base._impl->_items, __pyx_t_2); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 345, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":347 + * self._impl._items.append(_Pair.__new__( + * _Pair, self._title(key), _str(key), value)) + * self._impl.incr_version() # <<<<<<<<<<<<<< + * + * cdef _replace(self, key, value): + */ + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_base._impl->__pyx_vtab)->incr_version(__pyx_v_self->__pyx_base._impl); + + /* "multidict/_multidict.pyx":344 + * self._replace(key, value) + * + * cdef _add(self, key, value): # <<<<<<<<<<<<<< + * self._impl._items.append(_Pair.__new__( + * _Pair, self._title(key), _str(key), value)) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict.MultiDict._add", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":349 + * self._impl.incr_version() + * + * cdef _replace(self, key, value): # <<<<<<<<<<<<<< + * cdef str identity = self._title(key) + * cdef str k = _str(key) + */ + +static PyObject *__pyx_f_9multidict_10_multidict_9MultiDict__replace(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + PyObject *__pyx_v_identity = 0; + PyObject *__pyx_v_k = 0; + Py_hash_t __pyx_v_h; + Py_ssize_t __pyx_v_i; + Py_ssize_t __pyx_v_rgt; + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_items = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_hash_t __pyx_t_2; + Py_ssize_t __pyx_t_3; + Py_ssize_t __pyx_t_4; + PyObject *__pyx_t_5 = 
NULL; + int __pyx_t_6; + int __pyx_t_7; + int __pyx_t_8; + int __pyx_t_9; + __Pyx_RefNannySetupContext("_replace", 0); + + /* "multidict/_multidict.pyx":350 + * + * cdef _replace(self, key, value): + * cdef str identity = self._title(key) # <<<<<<<<<<<<<< + * cdef str k = _str(key) + * cdef Py_hash_t h = hash(identity) + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._title(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 350, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_identity = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":351 + * cdef _replace(self, key, value): + * cdef str identity = self._title(key) + * cdef str k = _str(key) # <<<<<<<<<<<<<< + * cdef Py_hash_t h = hash(identity) + * cdef Py_ssize_t i, rgt + */ + __pyx_t_1 = __pyx_f_9multidict_10_multidict__str(__pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 351, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_k = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":352 + * cdef str identity = self._title(key) + * cdef str k = _str(key) + * cdef Py_hash_t h = hash(identity) # <<<<<<<<<<<<<< + * cdef Py_ssize_t i, rgt + * cdef _Pair item + */ + __pyx_t_2 = PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 352, __pyx_L1_error) + __pyx_v_h = __pyx_t_2; + + /* "multidict/_multidict.pyx":355 + * cdef Py_ssize_t i, rgt + * cdef _Pair item + * cdef list items = self._impl._items # <<<<<<<<<<<<<< + * + * for i in range(len(items)-1, -1, -1): + */ + __pyx_t_1 = __pyx_v_self->__pyx_base._impl->_items; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_items = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":357 + * cdef list items = self._impl._items + * + * for i in range(len(items)-1, -1, -1): # <<<<<<<<<<<<<< + * item = <_Pair>items[i] + * if h != item._hash: + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 357, __pyx_L1_error) + } + __pyx_t_3 = PyList_GET_SIZE(__pyx_v_items); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 357, __pyx_L1_error) + for (__pyx_t_4 = (__pyx_t_3 - 1); __pyx_t_4 > -1L; __pyx_t_4-=1) { + __pyx_v_i = __pyx_t_4; + + /* "multidict/_multidict.pyx":358 + * + * for i in range(len(items)-1, -1, -1): + * item = <_Pair>items[i] # <<<<<<<<<<<<<< + * if h != item._hash: + * continue + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 358, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_List(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __pyx_t_1; + __Pyx_INCREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_5)); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":359 + * for i in range(len(items)-1, -1, -1): + * item = <_Pair>items[i] + * if h != item._hash: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + __pyx_t_6 = ((__pyx_v_h != __pyx_v_item->_hash) != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":360 + * item = <_Pair>items[i] + * if h != item._hash: + * continue # <<<<<<<<<<<<<< + * if item._identity == identity: + * item._key = k 
+ */ + goto __pyx_L3_continue; + + /* "multidict/_multidict.pyx":359 + * for i in range(len(items)-1, -1, -1): + * item = <_Pair>items[i] + * if h != item._hash: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + } + + /* "multidict/_multidict.pyx":361 + * if h != item._hash: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * item._key = k + * item._value = value + */ + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 361, __pyx_L1_error) + __pyx_t_7 = (__pyx_t_6 != 0); + if (__pyx_t_7) { + + /* "multidict/_multidict.pyx":362 + * continue + * if item._identity == identity: + * item._key = k # <<<<<<<<<<<<<< + * item._value = value + * # i points to last found item + */ + __Pyx_INCREF(__pyx_v_k); + __Pyx_GIVEREF(__pyx_v_k); + __Pyx_GOTREF(__pyx_v_item->_key); + __Pyx_DECREF(__pyx_v_item->_key); + __pyx_v_item->_key = __pyx_v_k; + + /* "multidict/_multidict.pyx":363 + * if item._identity == identity: + * item._key = k + * item._value = value # <<<<<<<<<<<<<< + * # i points to last found item + * rgt = i + */ + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_item->_value); + __Pyx_DECREF(__pyx_v_item->_value); + __pyx_v_item->_value = __pyx_v_value; + + /* "multidict/_multidict.pyx":365 + * item._value = value + * # i points to last found item + * rgt = i # <<<<<<<<<<<<<< + * self._impl.incr_version() + * break + */ + __pyx_v_rgt = __pyx_v_i; + + /* "multidict/_multidict.pyx":366 + * # i points to last found item + * rgt = i + * self._impl.incr_version() # <<<<<<<<<<<<<< + * break + * else: + */ + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_base._impl->__pyx_vtab)->incr_version(__pyx_v_self->__pyx_base._impl); + + /* "multidict/_multidict.pyx":367 + * rgt = i + * self._impl.incr_version() + * break # <<<<<<<<<<<<<< + * else: + * self._impl._items.append(_Pair.__new__(_Pair, identity, k, value)) + */ + goto __pyx_L4_break; + + /* "multidict/_multidict.pyx":361 + * if h != item._hash: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * item._key = k + * item._value = value + */ + } + __pyx_L3_continue:; + } + /*else*/ { + + /* "multidict/_multidict.pyx":369 + * break + * else: + * self._impl._items.append(_Pair.__new__(_Pair, identity, k, value)) # <<<<<<<<<<<<<< + * self._impl.incr_version() + * return + */ + if (unlikely(__pyx_v_self->__pyx_base._impl->_items == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "append"); + __PYX_ERR(0, 369, __pyx_L1_error) + } + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 369, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_identity); + __Pyx_GIVEREF(__pyx_v_identity); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_identity); + __Pyx_INCREF(__pyx_v_k); + __Pyx_GIVEREF(__pyx_v_k); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_v_k); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_value); + __pyx_t_1 = __pyx_tp_new_9multidict_10_multidict__Pair(((PyTypeObject *)__pyx_ptype_9multidict_10_multidict__Pair), __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 369, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_self->__pyx_base._impl->_items, __pyx_t_1); if (unlikely(__pyx_t_8 == -1)) __PYX_ERR(0, 369, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 
0; + + /* "multidict/_multidict.pyx":370 + * else: + * self._impl._items.append(_Pair.__new__(_Pair, identity, k, value)) + * self._impl.incr_version() # <<<<<<<<<<<<<< + * return + * + */ + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_base._impl->__pyx_vtab)->incr_version(__pyx_v_self->__pyx_base._impl); + + /* "multidict/_multidict.pyx":371 + * self._impl._items.append(_Pair.__new__(_Pair, identity, k, value)) + * self._impl.incr_version() + * return # <<<<<<<<<<<<<< + * + * # remove all precending items + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + } + __pyx_L4_break:; + + /* "multidict/_multidict.pyx":374 + * + * # remove all precending items + * i = 0 # <<<<<<<<<<<<<< + * while i < rgt: + * item = <_Pair>items[i] + */ + __pyx_v_i = 0; + + /* "multidict/_multidict.pyx":375 + * # remove all precending items + * i = 0 + * while i < rgt: # <<<<<<<<<<<<<< + * item = <_Pair>items[i] + * if h == item._hash and item._identity == identity: + */ + while (1) { + __pyx_t_7 = ((__pyx_v_i < __pyx_v_rgt) != 0); + if (!__pyx_t_7) break; + + /* "multidict/_multidict.pyx":376 + * i = 0 + * while i < rgt: + * item = <_Pair>items[i] # <<<<<<<<<<<<<< + * if h == item._hash and item._identity == identity: + * del items[i] + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 376, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_List(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 376, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __pyx_t_1; + __Pyx_INCREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_5)); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":377 + * while i < rgt: + * item = <_Pair>items[i] + * if h == item._hash and item._identity == identity: # <<<<<<<<<<<<<< + * del items[i] + * rgt -= 1 + */ + __pyx_t_6 = ((__pyx_v_h == __pyx_v_item->_hash) != 0); + if (__pyx_t_6) { + } else { + __pyx_t_7 = __pyx_t_6; + goto __pyx_L10_bool_binop_done; + } + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 377, __pyx_L1_error) + __pyx_t_9 = (__pyx_t_6 != 0); + __pyx_t_7 = __pyx_t_9; + __pyx_L10_bool_binop_done:; + if (__pyx_t_7) { + + /* "multidict/_multidict.pyx":378 + * item = <_Pair>items[i] + * if h == item._hash and item._identity == identity: + * del items[i] # <<<<<<<<<<<<<< + * rgt -= 1 + * else: + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 378, __pyx_L1_error) + } + if (unlikely(__Pyx_DelItemInt(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1) < 0)) __PYX_ERR(0, 378, __pyx_L1_error) + + /* "multidict/_multidict.pyx":379 + * if h == item._hash and item._identity == identity: + * del items[i] + * rgt -= 1 # <<<<<<<<<<<<<< + * else: + * i += 1 + */ + __pyx_v_rgt = (__pyx_v_rgt - 1); + + /* "multidict/_multidict.pyx":377 + * while i < rgt: + * item = <_Pair>items[i] + * if h == item._hash and item._identity == identity: # <<<<<<<<<<<<<< + * del items[i] + * rgt -= 1 + */ + goto __pyx_L9; + } + + /* "multidict/_multidict.pyx":381 + * rgt -= 1 + * else: + * i += 1 # <<<<<<<<<<<<<< + * + * def add(self, key, value): + */ + /*else*/ { + __pyx_v_i = (__pyx_v_i + 1); + } + 
__pyx_L9:; + } + + /* "multidict/_multidict.pyx":349 + * self._impl.incr_version() + * + * cdef _replace(self, key, value): # <<<<<<<<<<<<<< + * cdef str identity = self._title(key) + * cdef str k = _str(key) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("multidict._multidict.MultiDict._replace", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_identity); + __Pyx_XDECREF(__pyx_v_k); + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_items); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":383 + * i += 1 + * + * def add(self, key, value): # <<<<<<<<<<<<<< + * """Add the key and value, not overwriting any previous value.""" + * self._add(key, value) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_5add(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9MultiDict_4add[] = "Add the key and value, not overwriting any previous value."; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_5add(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_value = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("add (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_value,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_value)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("add", 1, 2, 2, 1); __PYX_ERR(0, 383, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "add") < 0)) __PYX_ERR(0, 383, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_key = values[0]; + __pyx_v_value = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("add", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 383, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict.MultiDict.add", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_4add(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), __pyx_v_key, __pyx_v_value); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_4add(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject 
*__pyx_v_key, PyObject *__pyx_v_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("add", 0); + + /* "multidict/_multidict.pyx":385 + * def add(self, key, value): + * """Add the key and value, not overwriting any previous value.""" + * self._add(key, value) # <<<<<<<<<<<<<< + * + * def copy(self): + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_add(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 385, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":383 + * i += 1 + * + * def add(self, key, value): # <<<<<<<<<<<<<< + * """Add the key and value, not overwriting any previous value.""" + * self._add(key, value) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict.MultiDict.add", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":387 + * self._add(key, value) + * + * def copy(self): # <<<<<<<<<<<<<< + * """Return a copy of itself.""" + * cls = self.__class__ + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_7copy(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9MultiDict_6copy[] = "Return a copy of itself."; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_7copy(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("copy (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_6copy(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_6copy(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self) { + PyObject *__pyx_v_cls = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("copy", 0); + + /* "multidict/_multidict.pyx":389 + * def copy(self): + * """Return a copy of itself.""" + * cls = self.__class__ # <<<<<<<<<<<<<< + * return cls(self) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 389, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":390 + * """Return a copy of itself.""" + * cls = self.__class__ + * return cls(self) # <<<<<<<<<<<<<< + * + * def extend(self, *args, **kwargs): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_cls); + __pyx_t_2 = __pyx_v_cls; __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_3) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, ((PyObject *)__pyx_v_self)); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 390, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_v_self)}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 390, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, ((PyObject *)__pyx_v_self)); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 390, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":387 + * self._add(key, value) + * + * def copy(self): # <<<<<<<<<<<<<< + * """Return a copy of itself.""" + * cls = self.__class__ + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict.MultiDict.copy", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_cls); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":392 + * return cls(self) + * + * def extend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * """Extend current MultiDict with more values. + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_9extend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9MultiDict_8extend[] = "Extend current MultiDict with more values.\n\n This method must be used instead of update.\n "; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_9extend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("extend (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "extend", 1))) return NULL; + __pyx_v_kwargs = (__pyx_kwds) ? 
PyDict_Copy(__pyx_kwds) : PyDict_New(); if (unlikely(!__pyx_v_kwargs)) return NULL; + __Pyx_GOTREF(__pyx_v_kwargs); + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_8extend(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_8extend(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("extend", 0); + + /* "multidict/_multidict.pyx":397 + * This method must be used instead of update. + * """ + * self._extend(args, kwargs, "extend", True) # <<<<<<<<<<<<<< + * + * def clear(self): + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_extend(__pyx_v_self, __pyx_v_args, __pyx_v_kwargs, __pyx_n_s_extend, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 397, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":392 + * return cls(self) + * + * def extend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * """Extend current MultiDict with more values. + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict.MultiDict.extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":399 + * self._extend(args, kwargs, "extend", True) + * + * def clear(self): # <<<<<<<<<<<<<< + * """Remove all items from MultiDict""" + * self._impl._items.clear() + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_11clear(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9MultiDict_10clear[] = "Remove all items from MultiDict"; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_11clear(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("clear (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_10clear(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_10clear(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("clear", 0); + + /* "multidict/_multidict.pyx":401 + * def clear(self): + * """Remove all items from MultiDict""" + * self._impl._items.clear() # <<<<<<<<<<<<<< + * self._impl.incr_version() + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->__pyx_base._impl->_items, __pyx_n_s_clear); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { 
+ __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (__pyx_t_3) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":402 + * """Remove all items from MultiDict""" + * self._impl._items.clear() + * self._impl.incr_version() # <<<<<<<<<<<<<< + * + * # MutableMapping interface # + */ + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_base._impl->__pyx_vtab)->incr_version(__pyx_v_self->__pyx_base._impl); + + /* "multidict/_multidict.pyx":399 + * self._extend(args, kwargs, "extend", True) + * + * def clear(self): # <<<<<<<<<<<<<< + * """Remove all items from MultiDict""" + * self._impl._items.clear() + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict.MultiDict.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":406 + * # MutableMapping interface # + * + * def __setitem__(self, key, value): # <<<<<<<<<<<<<< + * self._replace(key, value) + * + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_9MultiDict_13__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_9MultiDict_13__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_12__setitem__(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_9MultiDict_12__setitem__(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setitem__", 0); + + /* "multidict/_multidict.pyx":407 + * + * def __setitem__(self, key, value): + * self._replace(key, value) # <<<<<<<<<<<<<< + * + * def __delitem__(self, key): + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_replace(__pyx_v_self, __pyx_v_key, __pyx_v_value); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 407, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":406 + * # MutableMapping interface # + * + * def __setitem__(self, key, value): # <<<<<<<<<<<<<< + * self._replace(key, value) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + 
__Pyx_AddTraceback("multidict._multidict.MultiDict.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":409 + * self._replace(key, value) + * + * def __delitem__(self, key): # <<<<<<<<<<<<<< + * self._remove(key) + * + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_9MultiDict_15__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_9MultiDict_15__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_14__delitem__(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), ((PyObject *)__pyx_v_key)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_9MultiDict_14__delitem__(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__delitem__", 0); + + /* "multidict/_multidict.pyx":410 + * + * def __delitem__(self, key): + * self._remove(key) # <<<<<<<<<<<<<< + * + * cdef _remove(self, key): + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_remove(__pyx_v_self, __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 410, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":409 + * self._replace(key, value) + * + * def __delitem__(self, key): # <<<<<<<<<<<<<< + * self._remove(key) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict.MultiDict.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":412 + * self._remove(key) + * + * cdef _remove(self, key): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef bint found = False + */ + +static PyObject *__pyx_f_9multidict_10_multidict_9MultiDict__remove(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + int __pyx_v_found; + PyObject *__pyx_v_identity = 0; + Py_hash_t __pyx_v_h; + PyObject *__pyx_v_items = 0; + Py_ssize_t __pyx_v_i; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_hash_t __pyx_t_2; + Py_ssize_t __pyx_t_3; + Py_ssize_t __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + __Pyx_RefNannySetupContext("_remove", 0); + + /* "multidict/_multidict.pyx":414 + * cdef _remove(self, key): + * cdef _Pair item + * cdef bint found = False # <<<<<<<<<<<<<< + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) + */ + __pyx_v_found = 0; + + /* "multidict/_multidict.pyx":415 + * cdef _Pair item + * cdef bint found = False + * cdef str identity = self._title(key) # <<<<<<<<<<<<<< + * cdef Py_hash_t h = hash(identity) + * cdef list items = self._impl._items + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._title(((struct 
__pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 415, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_identity = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":416 + * cdef bint found = False + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) # <<<<<<<<<<<<<< + * cdef list items = self._impl._items + * for i in range(len(items) - 1, -1, -1): + */ + __pyx_t_2 = PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 416, __pyx_L1_error) + __pyx_v_h = __pyx_t_2; + + /* "multidict/_multidict.pyx":417 + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) + * cdef list items = self._impl._items # <<<<<<<<<<<<<< + * for i in range(len(items) - 1, -1, -1): + * item = <_Pair>items[i] + */ + __pyx_t_1 = __pyx_v_self->__pyx_base._impl->_items; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_items = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":418 + * cdef Py_hash_t h = hash(identity) + * cdef list items = self._impl._items + * for i in range(len(items) - 1, -1, -1): # <<<<<<<<<<<<<< + * item = <_Pair>items[i] + * if item._hash != h: + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 418, __pyx_L1_error) + } + __pyx_t_3 = PyList_GET_SIZE(__pyx_v_items); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 418, __pyx_L1_error) + for (__pyx_t_4 = (__pyx_t_3 - 1); __pyx_t_4 > -1L; __pyx_t_4-=1) { + __pyx_v_i = __pyx_t_4; + + /* "multidict/_multidict.pyx":419 + * cdef list items = self._impl._items + * for i in range(len(items) - 1, -1, -1): + * item = <_Pair>items[i] # <<<<<<<<<<<<<< + * if item._hash != h: + * continue + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 419, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_List(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __pyx_t_1; + __Pyx_INCREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_5)); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":420 + * for i in range(len(items) - 1, -1, -1): + * item = <_Pair>items[i] + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + __pyx_t_6 = ((__pyx_v_item->_hash != __pyx_v_h) != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":421 + * item = <_Pair>items[i] + * if item._hash != h: + * continue # <<<<<<<<<<<<<< + * if item._identity == identity: + * del items[i] + */ + goto __pyx_L3_continue; + + /* "multidict/_multidict.pyx":420 + * for i in range(len(items) - 1, -1, -1): + * item = <_Pair>items[i] + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + } + + /* "multidict/_multidict.pyx":422 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * del items[i] + * found = True + */ + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 422, __pyx_L1_error) + __pyx_t_7 = (__pyx_t_6 != 0); + if (__pyx_t_7) { + + /* "multidict/_multidict.pyx":423 + * continue + * if item._identity == identity: + * del items[i] # <<<<<<<<<<<<<< + * 
found = True + * if not found: + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 423, __pyx_L1_error) + } + if (unlikely(__Pyx_DelItemInt(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1) < 0)) __PYX_ERR(0, 423, __pyx_L1_error) + + /* "multidict/_multidict.pyx":424 + * if item._identity == identity: + * del items[i] + * found = True # <<<<<<<<<<<<<< + * if not found: + * raise KeyError(key) + */ + __pyx_v_found = 1; + + /* "multidict/_multidict.pyx":422 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * del items[i] + * found = True + */ + } + __pyx_L3_continue:; + } + + /* "multidict/_multidict.pyx":425 + * del items[i] + * found = True + * if not found: # <<<<<<<<<<<<<< + * raise KeyError(key) + * else: + */ + __pyx_t_7 = ((!(__pyx_v_found != 0)) != 0); + if (__pyx_t_7) { + + /* "multidict/_multidict.pyx":426 + * found = True + * if not found: + * raise KeyError(key) # <<<<<<<<<<<<<< + * else: + * self._impl.incr_version() + */ + __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 426, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_key); + __Pyx_GIVEREF(__pyx_v_key); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_key); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 426, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 426, __pyx_L1_error) + + /* "multidict/_multidict.pyx":425 + * del items[i] + * found = True + * if not found: # <<<<<<<<<<<<<< + * raise KeyError(key) + * else: + */ + } + + /* "multidict/_multidict.pyx":428 + * raise KeyError(key) + * else: + * self._impl.incr_version() # <<<<<<<<<<<<<< + * + * def setdefault(self, key, default=None): + */ + /*else*/ { + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_base._impl->__pyx_vtab)->incr_version(__pyx_v_self->__pyx_base._impl); + } + + /* "multidict/_multidict.pyx":412 + * self._remove(key) + * + * cdef _remove(self, key): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef bint found = False + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("multidict._multidict.MultiDict._remove", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_identity); + __Pyx_XDECREF(__pyx_v_items); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":430 + * self._impl.incr_version() + * + * def setdefault(self, key, default=None): # <<<<<<<<<<<<<< + * """Return value for key, set value to default if key is not present.""" + * cdef _Pair item + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_17setdefault(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9MultiDict_16setdefault[] = "Return value for key, set value to default if key is not present."; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_17setdefault(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_default = 0; + 
PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("setdefault (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + PyObject* values[2] = {0,0}; + values[1] = ((PyObject *)Py_None); + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (kw_args > 0) { + PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_default); + if (value) { values[1] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "setdefault") < 0)) __PYX_ERR(0, 430, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("setdefault", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 430, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict.MultiDict.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_16setdefault(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_16setdefault(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_identity = 0; + Py_hash_t __pyx_v_h; + PyObject *__pyx_v_items = 0; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_hash_t __pyx_t_2; + Py_ssize_t __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + __Pyx_RefNannySetupContext("setdefault", 0); + + /* "multidict/_multidict.pyx":433 + * """Return value for key, set value to default if key is not present.""" + * cdef _Pair item + * cdef str identity = self._title(key) # <<<<<<<<<<<<<< + * cdef Py_hash_t h = hash(identity) + * cdef list items = self._impl._items + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._title(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 433, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_identity = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":434 + * cdef _Pair item + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) # <<<<<<<<<<<<<< + * cdef list items = self._impl._items + * for i in items: + */ + __pyx_t_2 = 
PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 434, __pyx_L1_error) + __pyx_v_h = __pyx_t_2; + + /* "multidict/_multidict.pyx":435 + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) + * cdef list items = self._impl._items # <<<<<<<<<<<<<< + * for i in items: + * item = <_Pair>i + */ + __pyx_t_1 = __pyx_v_self->__pyx_base._impl->_items; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_items = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":436 + * cdef Py_hash_t h = hash(identity) + * cdef list items = self._impl._items + * for i in items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._hash != h: + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 436, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_3 = 0; + for (;;) { + if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_3); __Pyx_INCREF(__pyx_t_4); __pyx_t_3++; if (unlikely(0 < 0)) __PYX_ERR(0, 436, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_3); __pyx_t_3++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 436, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":437 + * cdef list items = self._impl._items + * for i in items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * if item._hash != h: + * continue + */ + __pyx_t_4 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":438 + * for i in items: + * item = <_Pair>i + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + __pyx_t_5 = ((__pyx_v_item->_hash != __pyx_v_h) != 0); + if (__pyx_t_5) { + + /* "multidict/_multidict.pyx":439 + * item = <_Pair>i + * if item._hash != h: + * continue # <<<<<<<<<<<<<< + * if item._identity == identity: + * return item._value + */ + goto __pyx_L3_continue; + + /* "multidict/_multidict.pyx":438 + * for i in items: + * item = <_Pair>i + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + } + + /* "multidict/_multidict.pyx":440 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * return item._value + * self._add(key, default) + */ + __pyx_t_5 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_5 < 0)) __PYX_ERR(0, 440, __pyx_L1_error) + __pyx_t_6 = (__pyx_t_5 != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":441 + * continue + * if item._identity == identity: + * return item._value # <<<<<<<<<<<<<< + * self._add(key, default) + * return default + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_item->_value); + __pyx_r = __pyx_v_item->_value; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":440 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * return item._value + * self._add(key, default) + */ + } + + /* "multidict/_multidict.pyx":436 + * cdef Py_hash_t h = hash(identity) + * cdef list items = self._impl._items + * for i in items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._hash != h: + */ + __pyx_L3_continue:; + } + __Pyx_DECREF(__pyx_t_1); 
__pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":442 + * if item._identity == identity: + * return item._value + * self._add(key, default) # <<<<<<<<<<<<<< + * return default + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_add(__pyx_v_self, __pyx_v_key, __pyx_v_default); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 442, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":443 + * return item._value + * self._add(key, default) + * return default # <<<<<<<<<<<<<< + * + * def popone(self, key, default=_marker): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_default); + __pyx_r = __pyx_v_default; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":430 + * self._impl.incr_version() + * + * def setdefault(self, key, default=None): # <<<<<<<<<<<<<< + * """Return value for key, set value to default if key is not present.""" + * cdef _Pair item + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict.MultiDict.setdefault", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_identity); + __Pyx_XDECREF(__pyx_v_items); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":445 + * return default + * + * def popone(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Remove the last occurrence of key and return the corresponding + * value. + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_19popone(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9MultiDict_18popone[] = "Remove the last occurrence of key and return the corresponding\n value.\n\n If key is not found, default is returned if given, otherwise\n KeyError is raised.\n\n "; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_19popone(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_default = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("popone (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + PyObject* values[2] = {0,0}; + values[1] = __pyx_k__6; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (kw_args > 0) { + PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_default); + if (value) { values[1] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "popone") < 0)) __PYX_ERR(0, 445, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto 
__pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("popone", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 445, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict.MultiDict.popone", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_18popone(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_18popone(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default) { + PyObject *__pyx_v_value = 0; + PyObject *__pyx_v_identity = 0; + Py_hash_t __pyx_v_h; + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_items = 0; + Py_ssize_t __pyx_v_i; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_hash_t __pyx_t_2; + Py_ssize_t __pyx_t_3; + Py_ssize_t __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + __Pyx_RefNannySetupContext("popone", 0); + + /* "multidict/_multidict.pyx":453 + * + * """ + * cdef object value = None # <<<<<<<<<<<<<< + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) + */ + __Pyx_INCREF(Py_None); + __pyx_v_value = Py_None; + + /* "multidict/_multidict.pyx":454 + * """ + * cdef object value = None + * cdef str identity = self._title(key) # <<<<<<<<<<<<<< + * cdef Py_hash_t h = hash(identity) + * cdef _Pair item + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._title(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 454, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_identity = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":455 + * cdef object value = None + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef list items = self._impl._items + */ + __pyx_t_2 = PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 455, __pyx_L1_error) + __pyx_v_h = __pyx_t_2; + + /* "multidict/_multidict.pyx":457 + * cdef Py_hash_t h = hash(identity) + * cdef _Pair item + * cdef list items = self._impl._items # <<<<<<<<<<<<<< + * for i in range(len(items)): + * item = <_Pair>items[i] + */ + __pyx_t_1 = __pyx_v_self->__pyx_base._impl->_items; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_items = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":458 + * cdef _Pair item + * cdef list items = self._impl._items + * for i in range(len(items)): # <<<<<<<<<<<<<< + * item = <_Pair>items[i] + * if item._hash != h: + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 458, __pyx_L1_error) + } + __pyx_t_3 = PyList_GET_SIZE(__pyx_v_items); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 458, __pyx_L1_error) + for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) { + __pyx_v_i = __pyx_t_4; + + /* "multidict/_multidict.pyx":459 + * cdef list items = 
self._impl._items + * for i in range(len(items)): + * item = <_Pair>items[i] # <<<<<<<<<<<<<< + * if item._hash != h: + * continue + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 459, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_List(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 459, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __pyx_t_1; + __Pyx_INCREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_5)); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":460 + * for i in range(len(items)): + * item = <_Pair>items[i] + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + __pyx_t_6 = ((__pyx_v_item->_hash != __pyx_v_h) != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":461 + * item = <_Pair>items[i] + * if item._hash != h: + * continue # <<<<<<<<<<<<<< + * if item._identity == identity: + * value = item._value + */ + goto __pyx_L3_continue; + + /* "multidict/_multidict.pyx":460 + * for i in range(len(items)): + * item = <_Pair>items[i] + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + } + + /* "multidict/_multidict.pyx":462 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * value = item._value + * del items[i] + */ + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 462, __pyx_L1_error) + __pyx_t_7 = (__pyx_t_6 != 0); + if (__pyx_t_7) { + + /* "multidict/_multidict.pyx":463 + * continue + * if item._identity == identity: + * value = item._value # <<<<<<<<<<<<<< + * del items[i] + * self._impl.incr_version() + */ + __pyx_t_5 = __pyx_v_item->_value; + __Pyx_INCREF(__pyx_t_5); + __Pyx_DECREF_SET(__pyx_v_value, __pyx_t_5); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":464 + * if item._identity == identity: + * value = item._value + * del items[i] # <<<<<<<<<<<<<< + * self._impl.incr_version() + * return value + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 464, __pyx_L1_error) + } + if (unlikely(__Pyx_DelItemInt(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1) < 0)) __PYX_ERR(0, 464, __pyx_L1_error) + + /* "multidict/_multidict.pyx":465 + * value = item._value + * del items[i] + * self._impl.incr_version() # <<<<<<<<<<<<<< + * return value + * if default is _marker: + */ + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_base._impl->__pyx_vtab)->incr_version(__pyx_v_self->__pyx_base._impl); + + /* "multidict/_multidict.pyx":466 + * del items[i] + * self._impl.incr_version() + * return value # <<<<<<<<<<<<<< + * if default is _marker: + * raise KeyError(key) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_value); + __pyx_r = __pyx_v_value; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":462 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * value = item._value + * del items[i] + */ + } + __pyx_L3_continue:; + } + + /* "multidict/_multidict.pyx":467 + * self._impl.incr_version() + * return value + * if default is _marker: # <<<<<<<<<<<<<< + * raise KeyError(key) + * else: + */ + __pyx_t_7 = (__pyx_v_default == 
__pyx_v_9multidict_10_multidict__marker); + __pyx_t_6 = (__pyx_t_7 != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":468 + * return value + * if default is _marker: + * raise KeyError(key) # <<<<<<<<<<<<<< + * else: + * return default + */ + __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_key); + __Pyx_GIVEREF(__pyx_v_key); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_key); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 468, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 468, __pyx_L1_error) + + /* "multidict/_multidict.pyx":467 + * self._impl.incr_version() + * return value + * if default is _marker: # <<<<<<<<<<<<<< + * raise KeyError(key) + * else: + */ + } + + /* "multidict/_multidict.pyx":470 + * raise KeyError(key) + * else: + * return default # <<<<<<<<<<<<<< + * + * pop = popone + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_default); + __pyx_r = __pyx_v_default; + goto __pyx_L0; + } + + /* "multidict/_multidict.pyx":445 + * return default + * + * def popone(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Remove the last occurrence of key and return the corresponding + * value. + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("multidict._multidict.MultiDict.popone", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_value); + __Pyx_XDECREF(__pyx_v_identity); + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_items); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":474 + * pop = popone + * + * def popall(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Remove all occurrences of key and return the list of corresponding + * values. 
+ */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_21popall(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9MultiDict_20popall[] = "Remove all occurrences of key and return the list of corresponding\n values.\n\n If key is not found, default is returned if given, otherwise\n KeyError is raised.\n\n "; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_21popall(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_default = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("popall (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_default,0}; + PyObject* values[2] = {0,0}; + values[1] = __pyx_k__7; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (kw_args > 0) { + PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_default); + if (value) { values[1] = value; kw_args--; } + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "popall") < 0)) __PYX_ERR(0, 474, __pyx_L3_error) + } + } else { + switch (PyTuple_GET_SIZE(__pyx_args)) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + break; + default: goto __pyx_L5_argtuple_error; + } + } + __pyx_v_key = values[0]; + __pyx_v_default = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("popall", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 474, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict.MultiDict.popall", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_20popall(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), __pyx_v_key, __pyx_v_default); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_20popall(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_default) { + int __pyx_v_found; + PyObject *__pyx_v_identity = 0; + Py_hash_t __pyx_v_h; + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_items = 0; + PyObject *__pyx_v_ret = 0; + Py_ssize_t __pyx_v_i; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_hash_t __pyx_t_2; + Py_ssize_t __pyx_t_3; + Py_ssize_t __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + int __pyx_t_8; + __Pyx_RefNannySetupContext("popall", 0); + + /* "multidict/_multidict.pyx":482 + * + * """ + * cdef bint found = False # <<<<<<<<<<<<<< + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) + */ + __pyx_v_found = 0; + + /* 
"multidict/_multidict.pyx":483 + * """ + * cdef bint found = False + * cdef str identity = self._title(key) # <<<<<<<<<<<<<< + * cdef Py_hash_t h = hash(identity) + * cdef _Pair item + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._title(((struct __pyx_obj_9multidict_10_multidict__Base *)__pyx_v_self), __pyx_v_key); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 483, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_identity = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":484 + * cdef bint found = False + * cdef str identity = self._title(key) + * cdef Py_hash_t h = hash(identity) # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef list items = self._impl._items + */ + __pyx_t_2 = PyObject_Hash(__pyx_v_identity); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 484, __pyx_L1_error) + __pyx_v_h = __pyx_t_2; + + /* "multidict/_multidict.pyx":486 + * cdef Py_hash_t h = hash(identity) + * cdef _Pair item + * cdef list items = self._impl._items # <<<<<<<<<<<<<< + * cdef list ret = [] + * for i in range(len(items)-1, -1, -1): + */ + __pyx_t_1 = __pyx_v_self->__pyx_base._impl->_items; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_items = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":487 + * cdef _Pair item + * cdef list items = self._impl._items + * cdef list ret = [] # <<<<<<<<<<<<<< + * for i in range(len(items)-1, -1, -1): + * item = <_Pair>items[i] + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 487, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_ret = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":488 + * cdef list items = self._impl._items + * cdef list ret = [] + * for i in range(len(items)-1, -1, -1): # <<<<<<<<<<<<<< + * item = <_Pair>items[i] + * if item._hash != h: + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 488, __pyx_L1_error) + } + __pyx_t_3 = PyList_GET_SIZE(__pyx_v_items); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 488, __pyx_L1_error) + for (__pyx_t_4 = (__pyx_t_3 - 1); __pyx_t_4 > -1L; __pyx_t_4-=1) { + __pyx_v_i = __pyx_t_4; + + /* "multidict/_multidict.pyx":489 + * cdef list ret = [] + * for i in range(len(items)-1, -1, -1): + * item = <_Pair>items[i] # <<<<<<<<<<<<<< + * if item._hash != h: + * continue + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 489, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_List(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 489, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __pyx_t_1; + __Pyx_INCREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_5)); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":490 + * for i in range(len(items)-1, -1, -1): + * item = <_Pair>items[i] + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + __pyx_t_6 = ((__pyx_v_item->_hash != __pyx_v_h) != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":491 + * item = <_Pair>items[i] + * if item._hash != h: + * continue # <<<<<<<<<<<<<< + * if item._identity == identity: + * ret.append(item._value) + */ + goto __pyx_L3_continue; + + /* "multidict/_multidict.pyx":490 + * for i in 
range(len(items)-1, -1, -1): + * item = <_Pair>items[i] + * if item._hash != h: # <<<<<<<<<<<<<< + * continue + * if item._identity == identity: + */ + } + + /* "multidict/_multidict.pyx":492 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * ret.append(item._value) + * del items[i] + */ + __pyx_t_6 = (__Pyx_PyString_Equals(__pyx_v_item->_identity, __pyx_v_identity, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 492, __pyx_L1_error) + __pyx_t_7 = (__pyx_t_6 != 0); + if (__pyx_t_7) { + + /* "multidict/_multidict.pyx":493 + * continue + * if item._identity == identity: + * ret.append(item._value) # <<<<<<<<<<<<<< + * del items[i] + * self._impl.incr_version() + */ + __pyx_t_5 = __pyx_v_item->_value; + __Pyx_INCREF(__pyx_t_5); + __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_ret, __pyx_t_5); if (unlikely(__pyx_t_8 == -1)) __PYX_ERR(0, 493, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":494 + * if item._identity == identity: + * ret.append(item._value) + * del items[i] # <<<<<<<<<<<<<< + * self._impl.incr_version() + * found = True + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 494, __pyx_L1_error) + } + if (unlikely(__Pyx_DelItemInt(__pyx_v_items, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 1, 1, 1) < 0)) __PYX_ERR(0, 494, __pyx_L1_error) + + /* "multidict/_multidict.pyx":495 + * ret.append(item._value) + * del items[i] + * self._impl.incr_version() # <<<<<<<<<<<<<< + * found = True + * if not found: + */ + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_base._impl->__pyx_vtab)->incr_version(__pyx_v_self->__pyx_base._impl); + + /* "multidict/_multidict.pyx":496 + * del items[i] + * self._impl.incr_version() + * found = True # <<<<<<<<<<<<<< + * if not found: + * if default is _marker: + */ + __pyx_v_found = 1; + + /* "multidict/_multidict.pyx":492 + * if item._hash != h: + * continue + * if item._identity == identity: # <<<<<<<<<<<<<< + * ret.append(item._value) + * del items[i] + */ + } + __pyx_L3_continue:; + } + + /* "multidict/_multidict.pyx":497 + * self._impl.incr_version() + * found = True + * if not found: # <<<<<<<<<<<<<< + * if default is _marker: + * raise KeyError(key) + */ + __pyx_t_7 = ((!(__pyx_v_found != 0)) != 0); + if (__pyx_t_7) { + + /* "multidict/_multidict.pyx":498 + * found = True + * if not found: + * if default is _marker: # <<<<<<<<<<<<<< + * raise KeyError(key) + * else: + */ + __pyx_t_7 = (__pyx_v_default == __pyx_v_9multidict_10_multidict__marker); + __pyx_t_6 = (__pyx_t_7 != 0); + if (__pyx_t_6) { + + /* "multidict/_multidict.pyx":499 + * if not found: + * if default is _marker: + * raise KeyError(key) # <<<<<<<<<<<<<< + * else: + * return default + */ + __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 499, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_key); + __Pyx_GIVEREF(__pyx_v_key); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_key); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 499, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 499, __pyx_L1_error) + + /* "multidict/_multidict.pyx":498 + * found = True + * if not found: + * if default is _marker: # <<<<<<<<<<<<<< + * raise KeyError(key) + * else: + */ + } + + /* 
"multidict/_multidict.pyx":501 + * raise KeyError(key) + * else: + * return default # <<<<<<<<<<<<<< + * else: + * ret.reverse() + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_default); + __pyx_r = __pyx_v_default; + goto __pyx_L0; + } + + /* "multidict/_multidict.pyx":497 + * self._impl.incr_version() + * found = True + * if not found: # <<<<<<<<<<<<<< + * if default is _marker: + * raise KeyError(key) + */ + } + + /* "multidict/_multidict.pyx":503 + * return default + * else: + * ret.reverse() # <<<<<<<<<<<<<< + * return ret + * + */ + /*else*/ { + __pyx_t_8 = PyList_Reverse(__pyx_v_ret); if (unlikely(__pyx_t_8 == -1)) __PYX_ERR(0, 503, __pyx_L1_error) + + /* "multidict/_multidict.pyx":504 + * else: + * ret.reverse() + * return ret # <<<<<<<<<<<<<< + * + * def popitem(self): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_ret); + __pyx_r = __pyx_v_ret; + goto __pyx_L0; + } + + /* "multidict/_multidict.pyx":474 + * pop = popone + * + * def popall(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Remove all occurrences of key and return the list of corresponding + * values. + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("multidict._multidict.MultiDict.popall", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_identity); + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_items); + __Pyx_XDECREF(__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":506 + * return ret + * + * def popitem(self): # <<<<<<<<<<<<<< + * """Remove and return an arbitrary (key, value) pair.""" + * cdef _Pair item + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_23popitem(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9MultiDict_22popitem[] = "Remove and return an arbitrary (key, value) pair."; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_23popitem(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("popitem (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_22popitem(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_22popitem(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_items = 0; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("popitem", 0); + + /* "multidict/_multidict.pyx":509 + * """Remove and return an arbitrary (key, value) pair.""" + * cdef _Pair item + * cdef list items = self._impl._items # <<<<<<<<<<<<<< + * if items: + * item = <_Pair>items.pop(0) + */ + __pyx_t_1 = __pyx_v_self->__pyx_base._impl->_items; + __Pyx_INCREF(__pyx_t_1); + __pyx_v_items = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":510 + * cdef _Pair item + * cdef list items = self._impl._items + * if items: # <<<<<<<<<<<<<< + * item = <_Pair>items.pop(0) + * self._impl.incr_version() + */ + __pyx_t_2 = (__pyx_v_items != Py_None) && 
(PyList_GET_SIZE(__pyx_v_items) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":511 + * cdef list items = self._impl._items + * if items: + * item = <_Pair>items.pop(0) # <<<<<<<<<<<<<< + * self._impl.incr_version() + * return (item._key, item._value) + */ + if (unlikely(__pyx_v_items == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "pop"); + __PYX_ERR(0, 511, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_PyList_PopIndex(__pyx_v_items, __pyx_int_0, 0, 1, Py_ssize_t, PyInt_FromSsize_t); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 511, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; + __Pyx_INCREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_item = ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":512 + * if items: + * item = <_Pair>items.pop(0) + * self._impl.incr_version() # <<<<<<<<<<<<<< + * return (item._key, item._value) + * else: + */ + ((struct __pyx_vtabstruct_9multidict_10_multidict__Impl *)__pyx_v_self->__pyx_base._impl->__pyx_vtab)->incr_version(__pyx_v_self->__pyx_base._impl); + + /* "multidict/_multidict.pyx":513 + * item = <_Pair>items.pop(0) + * self._impl.incr_version() + * return (item._key, item._value) # <<<<<<<<<<<<<< + * else: + * raise KeyError("empty multidict") + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 513, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_item->_key); + __Pyx_GIVEREF(__pyx_v_item->_key); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_item->_key); + __Pyx_INCREF(__pyx_v_item->_value); + __Pyx_GIVEREF(__pyx_v_item->_value); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_item->_value); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":510 + * cdef _Pair item + * cdef list items = self._impl._items + * if items: # <<<<<<<<<<<<<< + * item = <_Pair>items.pop(0) + * self._impl.incr_version() + */ + } + + /* "multidict/_multidict.pyx":515 + * return (item._key, item._value) + * else: + * raise KeyError("empty multidict") # <<<<<<<<<<<<<< + * + * def update(self, *args, **kwargs): + */ + /*else*/ { + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_KeyError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 515, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(0, 515, __pyx_L1_error) + } + + /* "multidict/_multidict.pyx":506 + * return ret + * + * def popitem(self): # <<<<<<<<<<<<<< + * """Remove and return an arbitrary (key, value) pair.""" + * cdef _Pair item + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict.MultiDict.popitem", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_items); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":517 + * raise KeyError("empty multidict") + * + * def update(self, *args, **kwargs): # <<<<<<<<<<<<<< + * """Update the dictionary from *other*, overwriting existing keys.""" + * self._extend(args, kwargs, "update", False) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_25update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char 
__pyx_doc_9multidict_10_multidict_9MultiDict_24update[] = "Update the dictionary from *other*, overwriting existing keys."; +static PyObject *__pyx_pw_9multidict_10_multidict_9MultiDict_25update(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("update (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "update", 1))) return NULL; + __pyx_v_kwargs = (__pyx_kwds) ? PyDict_Copy(__pyx_kwds) : PyDict_New(); if (unlikely(!__pyx_v_kwargs)) return NULL; + __Pyx_GOTREF(__pyx_v_kwargs); + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_9multidict_10_multidict_9MultiDict_24update(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9MultiDict_24update(struct __pyx_obj_9multidict_10_multidict_MultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("update", 0); + + /* "multidict/_multidict.pyx":519 + * def update(self, *args, **kwargs): + * """Update the dictionary from *other*, overwriting existing keys.""" + * self._extend(args, kwargs, "update", False) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict *)__pyx_v_self->__pyx_base.__pyx_vtab)->_extend(__pyx_v_self, __pyx_v_args, __pyx_v_kwargs, __pyx_n_s_update, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 519, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":517 + * raise KeyError("empty multidict") + * + * def update(self, *args, **kwargs): # <<<<<<<<<<<<<< + * """Update the dictionary from *other*, overwriting existing keys.""" + * self._extend(args, kwargs, "update", False) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict.MultiDict.update", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":528 + * """An ordered dictionary that can have multiple values for each key.""" + * + * def __init__(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._impl = _Impl() + * + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_11CIMultiDict_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_11CIMultiDict_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_kwargs = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__init__", 1))) return -1; + __pyx_v_kwargs = (__pyx_kwds) ? 
PyDict_Copy(__pyx_kwds) : PyDict_New(); if (unlikely(!__pyx_v_kwargs)) return -1; + __Pyx_GOTREF(__pyx_v_kwargs); + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_9multidict_10_multidict_11CIMultiDict___init__(((struct __pyx_obj_9multidict_10_multidict_CIMultiDict *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_11CIMultiDict___init__(struct __pyx_obj_9multidict_10_multidict_CIMultiDict *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "multidict/_multidict.pyx":529 + * + * def __init__(self, *args, **kwargs): + * self._impl = _Impl() # <<<<<<<<<<<<<< + * + * self._extend(args, kwargs, 'CIMultiDict', True) + */ + __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_9multidict_10_multidict__Impl), __pyx_empty_tuple, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 529, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->__pyx_base.__pyx_base._impl); + __Pyx_DECREF(((PyObject *)__pyx_v_self->__pyx_base.__pyx_base._impl)); + __pyx_v_self->__pyx_base.__pyx_base._impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":531 + * self._impl = _Impl() + * + * self._extend(args, kwargs, 'CIMultiDict', True) # <<<<<<<<<<<<<< + * + * cdef str _title(self, s): + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9multidict_10_multidict_CIMultiDict *)__pyx_v_self->__pyx_base.__pyx_base.__pyx_vtab)->__pyx_base._extend(((struct __pyx_obj_9multidict_10_multidict_MultiDict *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs, __pyx_n_s_CIMultiDict, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 531, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":528 + * """An ordered dictionary that can have multiple values for each key.""" + * + * def __init__(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._impl = _Impl() + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict.CIMultiDict.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":533 + * self._extend(args, kwargs, 'CIMultiDict', True) + * + * cdef str _title(self, s): # <<<<<<<<<<<<<< + * typ = type(s) + * if typ is str: + */ + +static PyObject *__pyx_f_9multidict_10_multidict_11CIMultiDict__title(CYTHON_UNUSED struct __pyx_obj_9multidict_10_multidict_CIMultiDict *__pyx_v_self, PyObject *__pyx_v_s) { + PyTypeObject *__pyx_v_typ = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("_title", 0); + + /* "multidict/_multidict.pyx":534 + * + * cdef str _title(self, s): + * typ = type(s) # <<<<<<<<<<<<<< + * if typ is str: + * return (s.title()) + */ + __Pyx_INCREF(((PyObject *)Py_TYPE(__pyx_v_s))); + __pyx_v_typ = ((PyTypeObject*)((PyObject *)Py_TYPE(__pyx_v_s))); + + /* "multidict/_multidict.pyx":535 + * cdef str _title(self, s): + * typ = type(s) + * if typ is 
str: # <<<<<<<<<<<<<< + * return (s.title()) + * elif type(s) is _istr: + */ + __pyx_t_1 = (__pyx_v_typ == (&PyString_Type)); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":536 + * typ = type(s) + * if typ is str: + * return (s.title()) # <<<<<<<<<<<<<< + * elif type(s) is _istr: + * return PyObject_Str(s) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_s, __pyx_n_s_title); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 536, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 536, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else { + __pyx_t_3 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 536, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_INCREF(((PyObject*)__pyx_t_3)); + __pyx_r = ((PyObject*)__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":535 + * cdef str _title(self, s): + * typ = type(s) + * if typ is str: # <<<<<<<<<<<<<< + * return (s.title()) + * elif type(s) is _istr: + */ + } + + /* "multidict/_multidict.pyx":537 + * if typ is str: + * return (s.title()) + * elif type(s) is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(s) + * return s.title() + */ + __pyx_t_2 = (((PyObject *)Py_TYPE(__pyx_v_s)) == __pyx_v_9multidict_10_multidict__istr); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":538 + * return (s.title()) + * elif type(s) is _istr: + * return PyObject_Str(s) # <<<<<<<<<<<<<< + * return s.title() + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyObject_Str(__pyx_v_s); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 538, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 538, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":537 + * if typ is str: + * return (s.title()) + * elif type(s) is _istr: # <<<<<<<<<<<<<< + * return PyObject_Str(s) + * return s.title() + */ + } + + /* "multidict/_multidict.pyx":539 + * elif type(s) is _istr: + * return PyObject_Str(s) + * return s.title() # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_s, __pyx_n_s_title); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 539, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 539, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else { + __pyx_t_3 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 539, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 539, __pyx_L1_error) + __pyx_r = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":533 + * self._extend(args, kwargs, 'CIMultiDict', True) + * + * cdef str _title(self, s): # <<<<<<<<<<<<<< + * typ = type(s) + * if typ is str: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("multidict._multidict.CIMultiDict._title", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_typ); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":549 + * cdef _Impl _impl + * + * def __cinit__(self, _Impl impl): # <<<<<<<<<<<<<< + * self._impl = impl + * + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_9_ViewBase_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_9_ViewBase_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_impl,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_impl)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(0, 549, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 549, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._ViewBase.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_impl), __pyx_ptype_9multidict_10_multidict__Impl, 1, "impl", 0))) __PYX_ERR(0, 549, __pyx_L1_error) + __pyx_r = __pyx_pf_9multidict_10_multidict_9_ViewBase___cinit__(((struct __pyx_obj_9multidict_10_multidict__ViewBase *)__pyx_v_self), __pyx_v_impl); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_9_ViewBase___cinit__(struct __pyx_obj_9multidict_10_multidict__ViewBase *__pyx_v_self, struct 
__pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "multidict/_multidict.pyx":550 + * + * def __cinit__(self, _Impl impl): + * self._impl = impl # <<<<<<<<<<<<<< + * + * def __len__(self): + */ + __Pyx_INCREF(((PyObject *)__pyx_v_impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_impl)); + __Pyx_GOTREF(__pyx_v_self->_impl); + __Pyx_DECREF(((PyObject *)__pyx_v_self->_impl)); + __pyx_v_self->_impl = __pyx_v_impl; + + /* "multidict/_multidict.pyx":549 + * cdef _Impl _impl + * + * def __cinit__(self, _Impl impl): # <<<<<<<<<<<<<< + * self._impl = impl + * + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":552 + * self._impl = impl + * + * def __len__(self): # <<<<<<<<<<<<<< + * return len(self._impl._items) + * + */ + +/* Python wrapper */ +static Py_ssize_t __pyx_pw_9multidict_10_multidict_9_ViewBase_3__len__(PyObject *__pyx_v_self); /*proto*/ +static Py_ssize_t __pyx_pw_9multidict_10_multidict_9_ViewBase_3__len__(PyObject *__pyx_v_self) { + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9_ViewBase_2__len__(((struct __pyx_obj_9multidict_10_multidict__ViewBase *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static Py_ssize_t __pyx_pf_9multidict_10_multidict_9_ViewBase_2__len__(struct __pyx_obj_9multidict_10_multidict__ViewBase *__pyx_v_self) { + Py_ssize_t __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + __Pyx_RefNannySetupContext("__len__", 0); + + /* "multidict/_multidict.pyx":553 + * + * def __len__(self): + * return len(self._impl._items) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __pyx_v_self->_impl->_items; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 553, __pyx_L1_error) + } + __pyx_t_2 = PyList_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 553, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":552 + * self._impl = impl + * + * def __len__(self): # <<<<<<<<<<<<<< + * return len(self._impl._items) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict._ViewBase.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":558 + * cdef class _ViewBaseSet(_ViewBase): + * + * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< + * if op == 0: # < + * if not isinstance(other, Set): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_1__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_1__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op) { + PyObject *__pyx_v_op = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__richcmp__ (wrapper)", 0); + __pyx_v_op = __Pyx_PyInt_From_int(__pyx_arg_op); if (unlikely(!__pyx_v_op)) __PYX_ERR(0, 558, __pyx_L3_error) + __Pyx_GOTREF(__pyx_v_op); + goto __pyx_L4_argument_unpacking_done; 
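+/* Editor's note (added comment, not part of the upstream Cython-generated output):
+ * Cython maps the rich-comparison slots onto a single __richcmp__ entry point that
+ * receives an integer op code (0 <, 1 <=, 2 ==, 3 !=, 4 >, 5 >=).  The dispatch
+ * below gives multidict views set-like comparison semantics in the style of
+ * collections.abc.Set, so, illustratively, one would expect Python code such as:
+ *
+ *     from multidict import MultiDict
+ *     d = MultiDict([('a', 1), ('a', 2)])
+ *     assert d.keys() >= {'a'}      # handled by the op == 5 branch below
+ */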
+ __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._ViewBaseSet.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9multidict_10_multidict_12_ViewBaseSet___richcmp__(((PyObject *)__pyx_v_self), ((PyObject *)__pyx_v_other), ((PyObject *)__pyx_v_op)); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_op); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet___richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op) { + PyObject *__pyx_v_elem = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + Py_ssize_t __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *(*__pyx_t_7)(PyObject *); + __Pyx_RefNannySetupContext("__richcmp__", 0); + + /* "multidict/_multidict.pyx":559 + * + * def __richcmp__(self, other, op): + * if op == 0: # < # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 559, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 559, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":560 + * def __richcmp__(self, other, op): + * if op == 0: # < + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) < len(other) and self <= other + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 560, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 560, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":561 + * if op == 0: # < + * if not isinstance(other, Set): + * return NotImplemented # <<<<<<<<<<<<<< + * return len(self) < len(other) and self <= other + * elif op == 1: # <= + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":560 + * def __richcmp__(self, other, op): + * if op == 0: # < + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) < len(other) and self <= other + */ + } + + /* "multidict/_multidict.pyx":562 + * if not isinstance(other, Set): + * return NotImplemented + * return len(self) < len(other) and self <= other # <<<<<<<<<<<<<< + * elif op == 1: # <= + * if not isinstance(other, Set): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyObject_Length(__pyx_v_self); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 562, __pyx_L1_error) + __pyx_t_5 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_5 == -1)) __PYX_ERR(0, 562, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_4 < __pyx_t_5); + if (__pyx_t_3) { + } else { + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 562, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L5_bool_binop_done; + } + __pyx_t_6 = PyObject_RichCompare(__pyx_v_self, __pyx_v_other, Py_LE); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 562, 
__pyx_L1_error) + __Pyx_INCREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_L5_bool_binop_done:; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":559 + * + * def __richcmp__(self, other, op): + * if op == 0: # < # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + } + + /* "multidict/_multidict.pyx":563 + * return NotImplemented + * return len(self) < len(other) and self <= other + * elif op == 1: # <= # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 563, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 563, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":564 + * return len(self) < len(other) and self <= other + * elif op == 1: # <= + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * if len(self) > len(other): + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 564, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 564, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = ((!(__pyx_t_3 != 0)) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":565 + * elif op == 1: # <= + * if not isinstance(other, Set): + * return NotImplemented # <<<<<<<<<<<<<< + * if len(self) > len(other): + * return False + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":564 + * return len(self) < len(other) and self <= other + * elif op == 1: # <= + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * if len(self) > len(other): + */ + } + + /* "multidict/_multidict.pyx":566 + * if not isinstance(other, Set): + * return NotImplemented + * if len(self) > len(other): # <<<<<<<<<<<<<< + * return False + * for elem in self: + */ + __pyx_t_5 = PyObject_Length(__pyx_v_self); if (unlikely(__pyx_t_5 == -1)) __PYX_ERR(0, 566, __pyx_L1_error) + __pyx_t_4 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 566, __pyx_L1_error) + __pyx_t_2 = ((__pyx_t_5 > __pyx_t_4) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":567 + * return NotImplemented + * if len(self) > len(other): + * return False # <<<<<<<<<<<<<< + * for elem in self: + * if elem not in other: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":566 + * if not isinstance(other, Set): + * return NotImplemented + * if len(self) > len(other): # <<<<<<<<<<<<<< + * return False + * for elem in self: + */ + } + + /* "multidict/_multidict.pyx":568 + * if len(self) > len(other): + * return False + * for elem in self: # <<<<<<<<<<<<<< + * if elem not in other: + * return False + */ + if (likely(PyList_CheckExact(__pyx_v_self)) || PyTuple_CheckExact(__pyx_v_self)) { + __pyx_t_1 = __pyx_v_self; __Pyx_INCREF(__pyx_t_1); __pyx_t_4 = 0; + __pyx_t_7 = NULL; + } else { + __pyx_t_4 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 568, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = 
Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 568, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_7)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 568, __pyx_L1_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 568, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } else { + if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 568, __pyx_L1_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_1, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 568, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } + } else { + __pyx_t_6 = __pyx_t_7(__pyx_t_1); + if (unlikely(!__pyx_t_6)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 568, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_6); + } + __Pyx_XDECREF_SET(__pyx_v_elem, __pyx_t_6); + __pyx_t_6 = 0; + + /* "multidict/_multidict.pyx":569 + * return False + * for elem in self: + * if elem not in other: # <<<<<<<<<<<<<< + * return False + * return True + */ + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_v_elem, __pyx_v_other, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 569, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":570 + * for elem in self: + * if elem not in other: + * return False # <<<<<<<<<<<<<< + * return True + * elif op == 2: # == + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":569 + * return False + * for elem in self: + * if elem not in other: # <<<<<<<<<<<<<< + * return False + * return True + */ + } + + /* "multidict/_multidict.pyx":568 + * if len(self) > len(other): + * return False + * for elem in self: # <<<<<<<<<<<<<< + * if elem not in other: + * return False + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":571 + * if elem not in other: + * return False + * return True # <<<<<<<<<<<<<< + * elif op == 2: # == + * if not isinstance(other, Set): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":563 + * return NotImplemented + * return len(self) < len(other) and self <= other + * elif op == 1: # <= # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + } + + /* "multidict/_multidict.pyx":572 + * return False + * return True + * elif op == 2: # == # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 572, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 572, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":573 + * return 
True + * elif op == 2: # == + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) == len(other) and self <= other + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 573, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 573, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = ((!(__pyx_t_3 != 0)) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":574 + * elif op == 2: # == + * if not isinstance(other, Set): + * return NotImplemented # <<<<<<<<<<<<<< + * return len(self) == len(other) and self <= other + * elif op == 3: # != + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":573 + * return True + * elif op == 2: # == + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) == len(other) and self <= other + */ + } + + /* "multidict/_multidict.pyx":575 + * if not isinstance(other, Set): + * return NotImplemented + * return len(self) == len(other) and self <= other # <<<<<<<<<<<<<< + * elif op == 3: # != + * return not self == other + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyObject_Length(__pyx_v_self); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 575, __pyx_L1_error) + __pyx_t_5 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_5 == -1)) __PYX_ERR(0, 575, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_4 == __pyx_t_5); + if (__pyx_t_2) { + } else { + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 575, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L13_bool_binop_done; + } + __pyx_t_6 = PyObject_RichCompare(__pyx_v_self, __pyx_v_other, Py_LE); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 575, __pyx_L1_error) + __Pyx_INCREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_L13_bool_binop_done:; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":572 + * return False + * return True + * elif op == 2: # == # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + } + + /* "multidict/_multidict.pyx":576 + * return NotImplemented + * return len(self) == len(other) and self <= other + * elif op == 3: # != # <<<<<<<<<<<<<< + * return not self == other + * elif op == 4: # > + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_3, 3, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 576, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 576, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":577 + * return len(self) == len(other) and self <= other + * elif op == 3: # != + * return not self == other # <<<<<<<<<<<<<< + * elif op == 4: # > + * if not isinstance(other, Set): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyObject_RichCompare(__pyx_v_self, __pyx_v_other, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 577, __pyx_L1_error) + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 577, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyBool_FromLong((!__pyx_t_2)); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 577, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":576 + * return NotImplemented + * return len(self) == len(other) and self <= other + * elif op == 3: # != # <<<<<<<<<<<<<< + * return not self == other + * elif op == 4: # > + */ + } + + /* "multidict/_multidict.pyx":578 + * elif op == 3: # != + * return not self == other + * elif op == 4: # > # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_4, 4, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 578, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 578, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":579 + * return not self == other + * elif op == 4: # > + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) > len(other) and self >= other + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 579, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 579, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":580 + * elif op == 4: # > + * if not isinstance(other, Set): + * return NotImplemented # <<<<<<<<<<<<<< + * return len(self) > len(other) and self >= other + * elif op == 5: # >= + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":579 + * return not self == other + * elif op == 4: # > + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * return len(self) > len(other) and self >= other + */ + } + + /* "multidict/_multidict.pyx":581 + * if not isinstance(other, Set): + * return NotImplemented + * return len(self) > len(other) and self >= other # <<<<<<<<<<<<<< + * elif op == 5: # >= + * if not isinstance(other, Set): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_5 = PyObject_Length(__pyx_v_self); if (unlikely(__pyx_t_5 == -1)) __PYX_ERR(0, 581, __pyx_L1_error) + __pyx_t_4 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 581, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_5 > __pyx_t_4); + if (__pyx_t_3) { + } else { + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 581, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L16_bool_binop_done; + } + __pyx_t_6 = PyObject_RichCompare(__pyx_v_self, __pyx_v_other, Py_GE); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 581, __pyx_L1_error) + __Pyx_INCREF(__pyx_t_6); + __pyx_t_1 = __pyx_t_6; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_L16_bool_binop_done:; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":578 + * elif op == 3: # != + * return not self == other + * elif op == 4: # > # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + } + + /* "multidict/_multidict.pyx":582 + * return NotImplemented + * return len(self) > len(other) and self >= other + * elif op == 5: # >= # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + 
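+/* Editor's note (added comment, not generated by Cython): every branch that sees a
+ * non-Set operand returns NotImplemented instead of raising, which lets the
+ * interpreter fall back to the reflected comparison on the other operand.  The
+ * op == 5 (>=) branch that follows mirrors the op == 1 (<=) logic above: a fast
+ * length check first, then a per-element containment test.
+ */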
__pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_5, 5, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 582, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 582, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":583 + * return len(self) > len(other) and self >= other + * elif op == 5: # >= + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * if len(self) < len(other): + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 583, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 583, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = ((!(__pyx_t_3 != 0)) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":584 + * elif op == 5: # >= + * if not isinstance(other, Set): + * return NotImplemented # <<<<<<<<<<<<<< + * if len(self) < len(other): + * return False + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":583 + * return len(self) > len(other) and self >= other + * elif op == 5: # >= + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * return NotImplemented + * if len(self) < len(other): + */ + } + + /* "multidict/_multidict.pyx":585 + * if not isinstance(other, Set): + * return NotImplemented + * if len(self) < len(other): # <<<<<<<<<<<<<< + * return False + * for elem in other: + */ + __pyx_t_4 = PyObject_Length(__pyx_v_self); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 585, __pyx_L1_error) + __pyx_t_5 = PyObject_Length(__pyx_v_other); if (unlikely(__pyx_t_5 == -1)) __PYX_ERR(0, 585, __pyx_L1_error) + __pyx_t_2 = ((__pyx_t_4 < __pyx_t_5) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":586 + * return NotImplemented + * if len(self) < len(other): + * return False # <<<<<<<<<<<<<< + * for elem in other: + * if elem not in self: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":585 + * if not isinstance(other, Set): + * return NotImplemented + * if len(self) < len(other): # <<<<<<<<<<<<<< + * return False + * for elem in other: + */ + } + + /* "multidict/_multidict.pyx":587 + * if len(self) < len(other): + * return False + * for elem in other: # <<<<<<<<<<<<<< + * if elem not in self: + * return False + */ + if (likely(PyList_CheckExact(__pyx_v_other)) || PyTuple_CheckExact(__pyx_v_other)) { + __pyx_t_1 = __pyx_v_other; __Pyx_INCREF(__pyx_t_1); __pyx_t_5 = 0; + __pyx_t_7 = NULL; + } else { + __pyx_t_5 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 587, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 587, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_7)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_5); __Pyx_INCREF(__pyx_t_6); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 587, __pyx_L1_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_1, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 587, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); 
+ #endif + } else { + if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_5); __Pyx_INCREF(__pyx_t_6); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 587, __pyx_L1_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_1, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 587, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } + } else { + __pyx_t_6 = __pyx_t_7(__pyx_t_1); + if (unlikely(!__pyx_t_6)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 587, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_6); + } + __Pyx_XDECREF_SET(__pyx_v_elem, __pyx_t_6); + __pyx_t_6 = 0; + + /* "multidict/_multidict.pyx":588 + * return False + * for elem in other: + * if elem not in self: # <<<<<<<<<<<<<< + * return False + * return True + */ + __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_v_elem, __pyx_v_self, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 588, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":589 + * for elem in other: + * if elem not in self: + * return False # <<<<<<<<<<<<<< + * return True + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":588 + * return False + * for elem in other: + * if elem not in self: # <<<<<<<<<<<<<< + * return False + * return True + */ + } + + /* "multidict/_multidict.pyx":587 + * if len(self) < len(other): + * return False + * for elem in other: # <<<<<<<<<<<<<< + * if elem not in self: + * return False + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":590 + * if elem not in self: + * return False + * return True # <<<<<<<<<<<<<< + * + * def __and__(self, other): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":582 + * return NotImplemented + * return len(self) > len(other) and self >= other + * elif op == 5: # >= # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * return NotImplemented + */ + } + + /* "multidict/_multidict.pyx":558 + * cdef class _ViewBaseSet(_ViewBase): + * + * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< + * if op == 0: # < + * if not isinstance(other, Set): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("multidict._multidict._ViewBaseSet.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_elem); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":592 + * return True + * + * def __and__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, Iterable): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_3__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_3__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__and__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_2__and__(((PyObject *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_2__and__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("__and__", 0); + __Pyx_INCREF(__pyx_v_self); + __Pyx_INCREF(__pyx_v_other); + + /* "multidict/_multidict.pyx":593 + * + * def __and__(self, other): + * if not isinstance(other, Iterable): # <<<<<<<<<<<<<< + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Iterable); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 593, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 593, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":594 + * def __and__(self, other): + * if not isinstance(other, Iterable): + * return NotImplemented # <<<<<<<<<<<<<< + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":593 + * + * def __and__(self, other): + * if not isinstance(other, Iterable): # <<<<<<<<<<<<<< + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + */ + } + + /* "multidict/_multidict.pyx":595 + * if not isinstance(other, Iterable): + * return NotImplemented + * if isinstance(self, _ViewBaseSet): # <<<<<<<<<<<<<< + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + */ + __pyx_t_3 = __Pyx_TypeCheck(__pyx_v_self, __pyx_ptype_9multidict_10_multidict__ViewBaseSet); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":596 + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) # <<<<<<<<<<<<<< + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 596, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySet_New(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 596, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_self, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":595 + * if not isinstance(other, Iterable): + * return NotImplemented + * if isinstance(self, _ViewBaseSet): # <<<<<<<<<<<<<< + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + */ + } + + /* "multidict/_multidict.pyx":597 + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * if not isinstance(other, Set): + */ + __pyx_t_2 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_9multidict_10_multidict__ViewBaseSet); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":598 + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * other = set(iter(other)) + */ + __pyx_t_4 = PyObject_GetIter(__pyx_v_other); 
if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 598, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PySet_New(__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 598, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_other, __pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":597 + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * if not isinstance(other, Set): + */ + } + + /* "multidict/_multidict.pyx":599 + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * return self & other + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 599, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 599, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = ((!(__pyx_t_3 != 0)) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":600 + * other = set(iter(other)) + * if not isinstance(other, Set): + * other = set(iter(other)) # <<<<<<<<<<<<<< + * return self & other + * + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 600, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySet_New(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 600, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_other, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":599 + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * return self & other + */ + } + + /* "multidict/_multidict.pyx":601 + * if not isinstance(other, Set): + * other = set(iter(other)) + * return self & other # <<<<<<<<<<<<<< + * + * def __or__(self, other): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyNumber_And(__pyx_v_self, __pyx_v_other); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 601, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":592 + * return True + * + * def __and__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, Iterable): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._ViewBaseSet.__and__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_self); + __Pyx_XDECREF(__pyx_v_other); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":603 + * return self & other + * + * def __or__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, Iterable): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_5__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_5__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__or__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_12_ViewBaseSet_4__or__(((PyObject *)__pyx_v_self), 
((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_4__or__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("__or__", 0); + __Pyx_INCREF(__pyx_v_self); + __Pyx_INCREF(__pyx_v_other); + + /* "multidict/_multidict.pyx":604 + * + * def __or__(self, other): + * if not isinstance(other, Iterable): # <<<<<<<<<<<<<< + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Iterable); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 604, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 604, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":605 + * def __or__(self, other): + * if not isinstance(other, Iterable): + * return NotImplemented # <<<<<<<<<<<<<< + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":604 + * + * def __or__(self, other): + * if not isinstance(other, Iterable): # <<<<<<<<<<<<<< + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + */ + } + + /* "multidict/_multidict.pyx":606 + * if not isinstance(other, Iterable): + * return NotImplemented + * if isinstance(self, _ViewBaseSet): # <<<<<<<<<<<<<< + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + */ + __pyx_t_3 = __Pyx_TypeCheck(__pyx_v_self, __pyx_ptype_9multidict_10_multidict__ViewBaseSet); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":607 + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) # <<<<<<<<<<<<<< + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 607, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySet_New(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 607, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_self, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":606 + * if not isinstance(other, Iterable): + * return NotImplemented + * if isinstance(self, _ViewBaseSet): # <<<<<<<<<<<<<< + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + */ + } + + /* "multidict/_multidict.pyx":608 + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * if not isinstance(other, Set): + */ + __pyx_t_2 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_9multidict_10_multidict__ViewBaseSet); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":609 + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * other = set(iter(other)) + */ + __pyx_t_4 = PyObject_GetIter(__pyx_v_other); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 609, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + 
__pyx_t_1 = PySet_New(__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 609, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_other, __pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":608 + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * if not isinstance(other, Set): + */ + } + + /* "multidict/_multidict.pyx":610 + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * return self | other + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 610, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 610, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = ((!(__pyx_t_3 != 0)) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":611 + * other = set(iter(other)) + * if not isinstance(other, Set): + * other = set(iter(other)) # <<<<<<<<<<<<<< + * return self | other + * + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 611, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySet_New(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 611, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_other, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":610 + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * return self | other + */ + } + + /* "multidict/_multidict.pyx":612 + * if not isinstance(other, Set): + * other = set(iter(other)) + * return self | other # <<<<<<<<<<<<<< + * + * def __sub__(self, other): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyNumber_Or(__pyx_v_self, __pyx_v_other); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 612, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":603 + * return self & other + * + * def __or__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, Iterable): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._ViewBaseSet.__or__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_self); + __Pyx_XDECREF(__pyx_v_other); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":614 + * return self | other + * + * def __sub__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, Iterable): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_7__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_7__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__sub__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_12_ViewBaseSet_6__sub__(((PyObject *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_6__sub__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("__sub__", 0); + __Pyx_INCREF(__pyx_v_self); + __Pyx_INCREF(__pyx_v_other); + + /* "multidict/_multidict.pyx":615 + * + * def __sub__(self, other): + * if not isinstance(other, Iterable): # <<<<<<<<<<<<<< + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Iterable); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 615, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 615, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":616 + * def __sub__(self, other): + * if not isinstance(other, Iterable): + * return NotImplemented # <<<<<<<<<<<<<< + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":615 + * + * def __sub__(self, other): + * if not isinstance(other, Iterable): # <<<<<<<<<<<<<< + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + */ + } + + /* "multidict/_multidict.pyx":617 + * if not isinstance(other, Iterable): + * return NotImplemented + * if isinstance(self, _ViewBaseSet): # <<<<<<<<<<<<<< + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + */ + __pyx_t_3 = __Pyx_TypeCheck(__pyx_v_self, __pyx_ptype_9multidict_10_multidict__ViewBaseSet); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":618 + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) # <<<<<<<<<<<<<< + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 618, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySet_New(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 618, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_self, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":617 + * if not isinstance(other, Iterable): + * return NotImplemented + * if isinstance(self, _ViewBaseSet): # <<<<<<<<<<<<<< + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + */ + } + + /* "multidict/_multidict.pyx":619 + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * if not isinstance(other, Set): + */ + __pyx_t_2 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_9multidict_10_multidict__ViewBaseSet); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":620 + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * other = set(iter(other)) + */ + __pyx_t_4 = PyObject_GetIter(__pyx_v_other); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 620, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PySet_New(__pyx_t_4); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 620, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_other, __pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":619 + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * if not isinstance(other, Set): + */ + } + + /* "multidict/_multidict.pyx":621 + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * return self - other + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 621, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 621, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = ((!(__pyx_t_3 != 0)) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":622 + * other = set(iter(other)) + * if not isinstance(other, Set): + * other = set(iter(other)) # <<<<<<<<<<<<<< + * return self - other + * + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 622, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySet_New(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 622, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_other, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":621 + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * return self - other + */ + } + + /* "multidict/_multidict.pyx":623 + * if not isinstance(other, Set): + * other = set(iter(other)) + * return self - other # <<<<<<<<<<<<<< + * + * def __xor__(self, other): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyNumber_Subtract(__pyx_v_self, __pyx_v_other); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 623, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":614 + * return self | other + * + * def __sub__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, Iterable): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._ViewBaseSet.__sub__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_self); + __Pyx_XDECREF(__pyx_v_other); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":625 + * return self - other + * + * def __xor__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, Iterable): + * return NotImplemented + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_9__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_12_ViewBaseSet_9__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__xor__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_12_ViewBaseSet_8__xor__(((PyObject *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + 
+static PyObject *__pyx_pf_9multidict_10_multidict_12_ViewBaseSet_8__xor__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("__xor__", 0); + __Pyx_INCREF(__pyx_v_self); + __Pyx_INCREF(__pyx_v_other); + + /* "multidict/_multidict.pyx":626 + * + * def __xor__(self, other): + * if not isinstance(other, Iterable): # <<<<<<<<<<<<<< + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Iterable); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 626, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 626, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":627 + * def __xor__(self, other): + * if not isinstance(other, Iterable): + * return NotImplemented # <<<<<<<<<<<<<< + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_builtin_NotImplemented); + __pyx_r = __pyx_builtin_NotImplemented; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":626 + * + * def __xor__(self, other): + * if not isinstance(other, Iterable): # <<<<<<<<<<<<<< + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + */ + } + + /* "multidict/_multidict.pyx":628 + * if not isinstance(other, Iterable): + * return NotImplemented + * if isinstance(self, _ViewBaseSet): # <<<<<<<<<<<<<< + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + */ + __pyx_t_3 = __Pyx_TypeCheck(__pyx_v_self, __pyx_ptype_9multidict_10_multidict__ViewBaseSet); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":629 + * return NotImplemented + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) # <<<<<<<<<<<<<< + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 629, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySet_New(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 629, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_self, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":628 + * if not isinstance(other, Iterable): + * return NotImplemented + * if isinstance(self, _ViewBaseSet): # <<<<<<<<<<<<<< + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + */ + } + + /* "multidict/_multidict.pyx":630 + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * if not isinstance(other, Set): + */ + __pyx_t_2 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_9multidict_10_multidict__ViewBaseSet); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "multidict/_multidict.pyx":631 + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) # <<<<<<<<<<<<<< + * if not isinstance(other, Set): + * other = set(iter(other)) + */ + __pyx_t_4 = PyObject_GetIter(__pyx_v_other); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 631, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PySet_New(__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 631, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_other, __pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":630 + * if isinstance(self, _ViewBaseSet): + * self = set(iter(self)) + * if isinstance(other, _ViewBaseSet): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * if not isinstance(other, Set): + */ + } + + /* "multidict/_multidict.pyx":632 + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * return self ^ other + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_Set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 632, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyObject_IsInstance(__pyx_v_other, __pyx_t_1); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 632, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = ((!(__pyx_t_3 != 0)) != 0); + if (__pyx_t_2) { + + /* "multidict/_multidict.pyx":633 + * other = set(iter(other)) + * if not isinstance(other, Set): + * other = set(iter(other)) # <<<<<<<<<<<<<< + * return self ^ other + * + */ + __pyx_t_1 = PyObject_GetIter(__pyx_v_other); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 633, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySet_New(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 633, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_other, __pyx_t_4); + __pyx_t_4 = 0; + + /* "multidict/_multidict.pyx":632 + * if isinstance(other, _ViewBaseSet): + * other = set(iter(other)) + * if not isinstance(other, Set): # <<<<<<<<<<<<<< + * other = set(iter(other)) + * return self ^ other + */ + } + + /* "multidict/_multidict.pyx":634 + * if not isinstance(other, Set): + * other = set(iter(other)) + * return self ^ other # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyNumber_Xor(__pyx_v_self, __pyx_v_other); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 634, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":625 + * return self - other + * + * def __xor__(self, other): # <<<<<<<<<<<<<< + * if not isinstance(other, Iterable): + * return NotImplemented + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("multidict._multidict._ViewBaseSet.__xor__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_self); + __Pyx_XDECREF(__pyx_v_other); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":643 + * cdef unsigned long long _version + * + * def __cinit__(self, _Impl impl): # <<<<<<<<<<<<<< + * self._impl = impl + * self._current = 0 + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_10_ItemsIter_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_10_ItemsIter_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_impl,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch 
(pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_impl)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(0, 643, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 643, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._ItemsIter.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_impl), __pyx_ptype_9multidict_10_multidict__Impl, 1, "impl", 0))) __PYX_ERR(0, 643, __pyx_L1_error) + __pyx_r = __pyx_pf_9multidict_10_multidict_10_ItemsIter___cinit__(((struct __pyx_obj_9multidict_10_multidict__ItemsIter *)__pyx_v_self), __pyx_v_impl); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_10_ItemsIter___cinit__(struct __pyx_obj_9multidict_10_multidict__ItemsIter *__pyx_v_self, struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl) { + int __pyx_r; + __Pyx_RefNannyDeclarations + unsigned PY_LONG_LONG __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + Py_ssize_t __pyx_t_3; + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "multidict/_multidict.pyx":644 + * + * def __cinit__(self, _Impl impl): + * self._impl = impl # <<<<<<<<<<<<<< + * self._current = 0 + * self._version = impl._version + */ + __Pyx_INCREF(((PyObject *)__pyx_v_impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_impl)); + __Pyx_GOTREF(__pyx_v_self->_impl); + __Pyx_DECREF(((PyObject *)__pyx_v_self->_impl)); + __pyx_v_self->_impl = __pyx_v_impl; + + /* "multidict/_multidict.pyx":645 + * def __cinit__(self, _Impl impl): + * self._impl = impl + * self._current = 0 # <<<<<<<<<<<<<< + * self._version = impl._version + * self._len = len(impl._items) + */ + __pyx_v_self->_current = 0; + + /* "multidict/_multidict.pyx":646 + * self._impl = impl + * self._current = 0 + * self._version = impl._version # <<<<<<<<<<<<<< + * self._len = len(impl._items) + * + */ + __pyx_t_1 = __pyx_v_impl->_version; + __pyx_v_self->_version = __pyx_t_1; + + /* "multidict/_multidict.pyx":647 + * self._current = 0 + * self._version = impl._version + * self._len = len(impl._items) # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __pyx_t_2 = __pyx_v_impl->_items; + __Pyx_INCREF(__pyx_t_2); + if (unlikely(__pyx_t_2 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 647, __pyx_L1_error) + } + __pyx_t_3 = PyList_GET_SIZE(__pyx_t_2); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 647, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_self->_len = __pyx_t_3; + + /* "multidict/_multidict.pyx":643 + * cdef unsigned long long _version + * + * def __cinit__(self, _Impl impl): 
# <<<<<<<<<<<<<< + * self._impl = impl + * self._current = 0 + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._ItemsIter.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":649 + * self._len = len(impl._items) + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return self + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsIter_3__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsIter_3__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_10_ItemsIter_2__iter__(((struct __pyx_obj_9multidict_10_multidict__ItemsIter *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsIter_2__iter__(struct __pyx_obj_9multidict_10_multidict__ItemsIter *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__", 0); + + /* "multidict/_multidict.pyx":650 + * + * def __iter__(self): + * return self # <<<<<<<<<<<<<< + * + * def __next__(self): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "multidict/_multidict.pyx":649 + * self._len = len(impl._items) + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return self + * + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":652 + * return self + * + * def __next__(self): # <<<<<<<<<<<<<< + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsIter_5__next__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsIter_5__next__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__next__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_10_ItemsIter_4__next__(((struct __pyx_obj_9multidict_10_multidict__ItemsIter *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsIter_4__next__(struct __pyx_obj_9multidict_10_multidict__ItemsIter *__pyx_v_self) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__next__", 0); + + /* "multidict/_multidict.pyx":653 + * + * def __next__(self): + * if self._version != self._impl._version: # <<<<<<<<<<<<<< + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + */ + __pyx_t_1 = ((__pyx_v_self->_version != __pyx_v_self->_impl->_version) != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":654 + * def __next__(self): + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") # <<<<<<<<<<<<<< + * if self._current == 
self._len: + * raise StopIteration + */ + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 654, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __PYX_ERR(0, 654, __pyx_L1_error) + + /* "multidict/_multidict.pyx":653 + * + * def __next__(self): + * if self._version != self._impl._version: # <<<<<<<<<<<<<< + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + */ + } + + /* "multidict/_multidict.pyx":655 + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: # <<<<<<<<<<<<<< + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] + */ + __pyx_t_1 = ((__pyx_v_self->_current == __pyx_v_self->_len) != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":656 + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + * raise StopIteration # <<<<<<<<<<<<<< + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 + */ + __Pyx_Raise(__pyx_builtin_StopIteration, 0, 0, 0); + __PYX_ERR(0, 656, __pyx_L1_error) + + /* "multidict/_multidict.pyx":655 + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: # <<<<<<<<<<<<<< + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] + */ + } + + /* "multidict/_multidict.pyx":657 + * if self._current == self._len: + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] # <<<<<<<<<<<<<< + * self._current += 1 + * return (item._key, item._value) + */ + if (unlikely(__pyx_v_self->_impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 657, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_self->_impl->_items, __pyx_v_self->_current, int, 1, __Pyx_PyInt_From_int, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 657, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __pyx_t_2; + __Pyx_INCREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_item = ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":658 + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 # <<<<<<<<<<<<<< + * return (item._key, item._value) + * + */ + __pyx_v_self->_current = (__pyx_v_self->_current + 1); + + /* "multidict/_multidict.pyx":659 + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 + * return (item._key, item._value) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 659, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_item->_key); + __Pyx_GIVEREF(__pyx_v_item->_key); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_item->_key); + __Pyx_INCREF(__pyx_v_item->_value); + __Pyx_GIVEREF(__pyx_v_item->_value); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_item->_value); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":652 + * return self + * + * def __next__(self): # <<<<<<<<<<<<<< + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + 
__Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict._ItemsIter.__next__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":664 + * cdef class _ItemsView(_ViewBaseSet): + * + * def isdisjoint(self, other): # <<<<<<<<<<<<<< + * 'Return True if two sets have a null intersection.' + * cdef _Pair item + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsView_1isdisjoint(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_10_ItemsView_isdisjoint[] = "Return True if two sets have a null intersection."; +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsView_1isdisjoint(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("isdisjoint (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_10_ItemsView_isdisjoint(((struct __pyx_obj_9multidict_10_multidict__ItemsView *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsView_isdisjoint(struct __pyx_obj_9multidict_10_multidict__ItemsView *__pyx_v_self, PyObject *__pyx_v_other) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + int __pyx_t_5; + __Pyx_RefNannySetupContext("isdisjoint", 0); + + /* "multidict/_multidict.pyx":667 + * 'Return True if two sets have a null intersection.' 
+ * cdef _Pair item + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * t = (item._key, item._value) + */ + if (unlikely(__pyx_v_self->__pyx_base.__pyx_base._impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 667, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->__pyx_base.__pyx_base._impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 667, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 667, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":668 + * cdef _Pair item + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * t = (item._key, item._value) + * if t in other: + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":669 + * for i in self._impl._items: + * item = <_Pair>i + * t = (item._key, item._value) # <<<<<<<<<<<<<< + * if t in other: + * return False + */ + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 669, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_item->_key); + __Pyx_GIVEREF(__pyx_v_item->_key); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_item->_key); + __Pyx_INCREF(__pyx_v_item->_value); + __Pyx_GIVEREF(__pyx_v_item->_value); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_item->_value); + __Pyx_XDECREF_SET(__pyx_v_t, ((PyObject*)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":670 + * item = <_Pair>i + * t = (item._key, item._value) + * if t in other: # <<<<<<<<<<<<<< + * return False + * return True + */ + __pyx_t_4 = (__Pyx_PySequence_ContainsTF(__pyx_v_t, __pyx_v_other, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 670, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + if (__pyx_t_5) { + + /* "multidict/_multidict.pyx":671 + * t = (item._key, item._value) + * if t in other: + * return False # <<<<<<<<<<<<<< + * return True + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":670 + * item = <_Pair>i + * t = (item._key, item._value) + * if t in other: # <<<<<<<<<<<<<< + * return False + * return True + */ + } + + /* "multidict/_multidict.pyx":667 + * 'Return True if two sets have a null intersection.' + * cdef _Pair item + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * t = (item._key, item._value) + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":672 + * if t in other: + * return False + * return True # <<<<<<<<<<<<<< + * + * def __contains__(self, i): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":664 + * cdef class _ItemsView(_ViewBaseSet): + * + * def isdisjoint(self, other): # <<<<<<<<<<<<<< + * 'Return True if two sets have a null intersection.' 
+ * cdef _Pair item + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict._ItemsView.isdisjoint", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":674 + * return True + * + * def __contains__(self, i): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef str key + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_10_ItemsView_3__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_i); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_10_ItemsView_3__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_i) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_10_ItemsView_2__contains__(((struct __pyx_obj_9multidict_10_multidict__ItemsView *)__pyx_v_self), ((PyObject *)__pyx_v_i)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_10_ItemsView_2__contains__(struct __pyx_obj_9multidict_10_multidict__ItemsView *__pyx_v_self, PyObject *__pyx_v_i) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_key = 0; + PyObject *__pyx_v_value = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + Py_ssize_t __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + __Pyx_RefNannySetupContext("__contains__", 0); + + /* "multidict/_multidict.pyx":678 + * cdef str key + * cdef object value + * assert isinstance(i, tuple) or isinstance(i, list) # <<<<<<<<<<<<<< + * assert len(i) == 2 + * key = i[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + if (unlikely(!Py_OptimizeFlag)) { + __pyx_t_2 = PyTuple_Check(__pyx_v_i); + __pyx_t_3 = (__pyx_t_2 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L3_bool_binop_done; + } + __pyx_t_3 = PyList_Check(__pyx_v_i); + __pyx_t_2 = (__pyx_t_3 != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L3_bool_binop_done:; + if (unlikely(!__pyx_t_1)) { + PyErr_SetNone(PyExc_AssertionError); + __PYX_ERR(0, 678, __pyx_L1_error) + } + } + #endif + + /* "multidict/_multidict.pyx":679 + * cdef object value + * assert isinstance(i, tuple) or isinstance(i, list) + * assert len(i) == 2 # <<<<<<<<<<<<<< + * key = i[0] + * value = i[1] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + if (unlikely(!Py_OptimizeFlag)) { + __pyx_t_4 = PyObject_Length(__pyx_v_i); if (unlikely(__pyx_t_4 == -1)) __PYX_ERR(0, 679, __pyx_L1_error) + if (unlikely(!((__pyx_t_4 == 2) != 0))) { + PyErr_SetNone(PyExc_AssertionError); + __PYX_ERR(0, 679, __pyx_L1_error) + } + } + #endif + + /* "multidict/_multidict.pyx":680 + * assert isinstance(i, tuple) or isinstance(i, list) + * assert len(i) == 2 + * key = i[0] # <<<<<<<<<<<<<< + * value = i[1] + * for item in self._impl._items: + */ + __pyx_t_5 = __Pyx_GetItemInt(__pyx_v_i, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 680, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (!(likely(PyString_CheckExact(__pyx_t_5))||((__pyx_t_5) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_5)->tp_name), 0))) __PYX_ERR(0, 680, __pyx_L1_error) + __pyx_v_key = 
((PyObject*)__pyx_t_5); + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":681 + * assert len(i) == 2 + * key = i[0] + * value = i[1] # <<<<<<<<<<<<<< + * for item in self._impl._items: + * if key == item._key and value == item._value: + */ + __pyx_t_5 = __Pyx_GetItemInt(__pyx_v_i, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 681, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_value = __pyx_t_5; + __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":682 + * key = i[0] + * value = i[1] + * for item in self._impl._items: # <<<<<<<<<<<<<< + * if key == item._key and value == item._value: + * return True + */ + if (unlikely(__pyx_v_self->__pyx_base.__pyx_base._impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 682, __pyx_L1_error) + } + __pyx_t_5 = __pyx_v_self->__pyx_base.__pyx_base._impl->_items; __Pyx_INCREF(__pyx_t_5); __pyx_t_4 = 0; + for (;;) { + if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_5)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_5, __pyx_t_4); __Pyx_INCREF(__pyx_t_6); __pyx_t_4++; if (unlikely(0 < 0)) __PYX_ERR(0, 682, __pyx_L1_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_5, __pyx_t_4); __pyx_t_4++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 682, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + if (!(likely(((__pyx_t_6) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_6, __pyx_ptype_9multidict_10_multidict__Pair))))) __PYX_ERR(0, 682, __pyx_L1_error) + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_6)); + __pyx_t_6 = 0; + + /* "multidict/_multidict.pyx":683 + * value = i[1] + * for item in self._impl._items: + * if key == item._key and value == item._value: # <<<<<<<<<<<<<< + * return True + * return False + */ + __pyx_t_2 = (__Pyx_PyString_Equals(__pyx_v_key, __pyx_v_item->_key, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 683, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_6 = PyObject_RichCompare(__pyx_v_value, __pyx_v_item->_value, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 683, __pyx_L1_error) + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 683, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_1 = __pyx_t_3; + __pyx_L8_bool_binop_done:; + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":684 + * for item in self._impl._items: + * if key == item._key and value == item._value: + * return True # <<<<<<<<<<<<<< + * return False + * + */ + __pyx_r = 1; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":683 + * value = i[1] + * for item in self._impl._items: + * if key == item._key and value == item._value: # <<<<<<<<<<<<<< + * return True + * return False + */ + } + + /* "multidict/_multidict.pyx":682 + * key = i[0] + * value = i[1] + * for item in self._impl._items: # <<<<<<<<<<<<<< + * if key == item._key and value == item._value: + * return True + */ + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "multidict/_multidict.pyx":685 + * if key == item._key and value == item._value: + * return True + * return False # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":674 + * return True + * + * def __contains__(self, i): # <<<<<<<<<<<<<< + * cdef _Pair item + * cdef 
str key + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("multidict._multidict._ItemsView.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_key); + __Pyx_XDECREF(__pyx_v_value); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":687 + * return False + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return _ItemsIter.__new__(_ItemsIter, self._impl) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsView_5__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsView_5__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_10_ItemsView_4__iter__(((struct __pyx_obj_9multidict_10_multidict__ItemsView *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsView_4__iter__(struct __pyx_obj_9multidict_10_multidict__ItemsView *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("__iter__", 0); + + /* "multidict/_multidict.pyx":688 + * + * def __iter__(self): + * return _ItemsIter.__new__(_ItemsIter, self._impl) # <<<<<<<<<<<<<< + * + * def __repr__(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 688, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)__pyx_v_self->__pyx_base.__pyx_base._impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self->__pyx_base.__pyx_base._impl)); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_self->__pyx_base.__pyx_base._impl)); + __pyx_t_2 = __pyx_tp_new_9multidict_10_multidict__ItemsIter(((PyTypeObject *)__pyx_ptype_9multidict_10_multidict__ItemsIter), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 688, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":687 + * return False + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return _ItemsIter.__new__(_ItemsIter, self._impl) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._ItemsView.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":690 + * return _ItemsIter.__new__(_ItemsIter, self._impl) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * cdef _Pair item + * lst = [] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsView_7__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_10_ItemsView_7__repr__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_10_ItemsView_6__repr__(((struct __pyx_obj_9multidict_10_multidict__ItemsView *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return 
__pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_10_ItemsView_6__repr__(struct __pyx_obj_9multidict_10_multidict__ItemsView *__pyx_v_self) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_lst = NULL; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_v_body = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + __Pyx_RefNannySetupContext("__repr__", 0); + + /* "multidict/_multidict.pyx":692 + * def __repr__(self): + * cdef _Pair item + * lst = [] # <<<<<<<<<<<<<< + * for i in self._impl._items: + * item = <_Pair>i + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 692, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_lst = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":693 + * cdef _Pair item + * lst = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * lst.append("{!r}: {!r}".format(item._key, item._value)) + */ + if (unlikely(__pyx_v_self->__pyx_base.__pyx_base._impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 693, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->__pyx_base.__pyx_base._impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 693, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 693, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":694 + * lst = [] + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * lst.append("{!r}: {!r}".format(item._key, item._value)) + * body = ', '.join(lst) + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":695 + * for i in self._impl._items: + * item = <_Pair>i + * lst.append("{!r}: {!r}".format(item._key, item._value)) # <<<<<<<<<<<<<< + * body = ', '.join(lst) + * return '{}({})'.format(self.__class__.__name__, body) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_r_r, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 695, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_item->_key, __pyx_v_item->_value}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 695, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if 
(__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_item->_key, __pyx_v_item->_value}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 695, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 695, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_item->_key); + __Pyx_GIVEREF(__pyx_v_item->_key); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, __pyx_v_item->_key); + __Pyx_INCREF(__pyx_v_item->_value); + __Pyx_GIVEREF(__pyx_v_item->_value); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, __pyx_v_item->_value); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 695, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_lst, __pyx_t_3); if (unlikely(__pyx_t_8 == -1)) __PYX_ERR(0, 695, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":693 + * cdef _Pair item + * lst = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * lst.append("{!r}: {!r}".format(item._key, item._value)) + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":696 + * item = <_Pair>i + * lst.append("{!r}: {!r}".format(item._key, item._value)) + * body = ', '.join(lst) # <<<<<<<<<<<<<< + * return '{}({})'.format(self.__class__.__name__, body) + * + */ + __pyx_t_1 = __Pyx_PyString_Join(__pyx_kp_s__3, __pyx_v_lst); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 696, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_body = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":697 + * lst.append("{!r}: {!r}".format(item._key, item._value)) + * body = ', '.join(lst) + * return '{}({})'.format(self.__class__.__name__, body) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s__10, __pyx_n_s_format); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 697, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 697, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_name); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 697, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_7, __pyx_v_body}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 697, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if 
(__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_7, __pyx_v_body}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 697, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 697, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_6, __pyx_t_7); + __Pyx_INCREF(__pyx_v_body); + __Pyx_GIVEREF(__pyx_v_body); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_6, __pyx_v_body); + __pyx_t_7 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 697, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":690 + * return _ItemsIter.__new__(_ItemsIter, self._impl) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * cdef _Pair item + * lst = [] + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("multidict._multidict._ItemsView.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_lst); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_body); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":709 + * cdef unsigned long long _version + * + * def __cinit__(self, _Impl impl): # <<<<<<<<<<<<<< + * self._impl = impl + * self._current = 0 + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_11_ValuesIter_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_11_ValuesIter_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_impl,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_impl)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(0, 709, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + 
__Pyx_RaiseArgtupleInvalid("__cinit__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 709, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._ValuesIter.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_impl), __pyx_ptype_9multidict_10_multidict__Impl, 1, "impl", 0))) __PYX_ERR(0, 709, __pyx_L1_error) + __pyx_r = __pyx_pf_9multidict_10_multidict_11_ValuesIter___cinit__(((struct __pyx_obj_9multidict_10_multidict__ValuesIter *)__pyx_v_self), __pyx_v_impl); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_11_ValuesIter___cinit__(struct __pyx_obj_9multidict_10_multidict__ValuesIter *__pyx_v_self, struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + unsigned PY_LONG_LONG __pyx_t_3; + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "multidict/_multidict.pyx":710 + * + * def __cinit__(self, _Impl impl): + * self._impl = impl # <<<<<<<<<<<<<< + * self._current = 0 + * self._len = len(impl._items) + */ + __Pyx_INCREF(((PyObject *)__pyx_v_impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_impl)); + __Pyx_GOTREF(__pyx_v_self->_impl); + __Pyx_DECREF(((PyObject *)__pyx_v_self->_impl)); + __pyx_v_self->_impl = __pyx_v_impl; + + /* "multidict/_multidict.pyx":711 + * def __cinit__(self, _Impl impl): + * self._impl = impl + * self._current = 0 # <<<<<<<<<<<<<< + * self._len = len(impl._items) + * self._version = impl._version + */ + __pyx_v_self->_current = 0; + + /* "multidict/_multidict.pyx":712 + * self._impl = impl + * self._current = 0 + * self._len = len(impl._items) # <<<<<<<<<<<<<< + * self._version = impl._version + * + */ + __pyx_t_1 = __pyx_v_impl->_items; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 712, __pyx_L1_error) + } + __pyx_t_2 = PyList_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 712, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_self->_len = __pyx_t_2; + + /* "multidict/_multidict.pyx":713 + * self._current = 0 + * self._len = len(impl._items) + * self._version = impl._version # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __pyx_t_3 = __pyx_v_impl->_version; + __pyx_v_self->_version = __pyx_t_3; + + /* "multidict/_multidict.pyx":709 + * cdef unsigned long long _version + * + * def __cinit__(self, _Impl impl): # <<<<<<<<<<<<<< + * self._impl = impl + * self._current = 0 + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict._ValuesIter.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":715 + * self._version = impl._version + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return self + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_11_ValuesIter_3__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_11_ValuesIter_3__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_11_ValuesIter_2__iter__(((struct __pyx_obj_9multidict_10_multidict__ValuesIter *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_11_ValuesIter_2__iter__(struct __pyx_obj_9multidict_10_multidict__ValuesIter *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__", 0); + + /* "multidict/_multidict.pyx":716 + * + * def __iter__(self): + * return self # <<<<<<<<<<<<<< + * + * def __next__(self): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "multidict/_multidict.pyx":715 + * self._version = impl._version + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return self + * + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":718 + * return self + * + * def __next__(self): # <<<<<<<<<<<<<< + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_11_ValuesIter_5__next__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_11_ValuesIter_5__next__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__next__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_11_ValuesIter_4__next__(((struct __pyx_obj_9multidict_10_multidict__ValuesIter *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_11_ValuesIter_4__next__(struct __pyx_obj_9multidict_10_multidict__ValuesIter *__pyx_v_self) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__next__", 0); + + /* "multidict/_multidict.pyx":719 + * + * def __next__(self): + * if self._version != self._impl._version: # <<<<<<<<<<<<<< + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + */ + __pyx_t_1 = ((__pyx_v_self->_version != __pyx_v_self->_impl->_version) != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":720 + * def __next__(self): + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") # <<<<<<<<<<<<<< + * if self._current == self._len: + * raise StopIteration + */ + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 720, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __PYX_ERR(0, 720, __pyx_L1_error) + + /* "multidict/_multidict.pyx":719 + * + * def __next__(self): + * if self._version != self._impl._version: # <<<<<<<<<<<<<< + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + */ + } + + /* "multidict/_multidict.pyx":721 + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: # <<<<<<<<<<<<<< + * raise StopIteration + * 
item = <_Pair>self._impl._items[self._current] + */ + __pyx_t_1 = ((__pyx_v_self->_current == __pyx_v_self->_len) != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":722 + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + * raise StopIteration # <<<<<<<<<<<<<< + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 + */ + __Pyx_Raise(__pyx_builtin_StopIteration, 0, 0, 0); + __PYX_ERR(0, 722, __pyx_L1_error) + + /* "multidict/_multidict.pyx":721 + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: # <<<<<<<<<<<<<< + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] + */ + } + + /* "multidict/_multidict.pyx":723 + * if self._current == self._len: + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] # <<<<<<<<<<<<<< + * self._current += 1 + * return item._value + */ + if (unlikely(__pyx_v_self->_impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 723, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_self->_impl->_items, __pyx_v_self->_current, int, 1, __Pyx_PyInt_From_int, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 723, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __pyx_t_2; + __Pyx_INCREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_item = ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":724 + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 # <<<<<<<<<<<<<< + * return item._value + * + */ + __pyx_v_self->_current = (__pyx_v_self->_current + 1); + + /* "multidict/_multidict.pyx":725 + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 + * return item._value # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_item->_value); + __pyx_r = __pyx_v_item->_value; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":718 + * return self + * + * def __next__(self): # <<<<<<<<<<<<<< + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict._ValuesIter.__next__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":730 + * cdef class _ValuesView(_ViewBase): + * + * def __contains__(self, value): # <<<<<<<<<<<<<< + * cdef _Pair item + * for i in self._impl._items: + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_11_ValuesView_1__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_11_ValuesView_1__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_11_ValuesView___contains__(((struct __pyx_obj_9multidict_10_multidict__ValuesView *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int 
__pyx_pf_9multidict_10_multidict_11_ValuesView___contains__(struct __pyx_obj_9multidict_10_multidict__ValuesView *__pyx_v_self, PyObject *__pyx_v_value) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_i = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + __Pyx_RefNannySetupContext("__contains__", 0); + + /* "multidict/_multidict.pyx":732 + * def __contains__(self, value): + * cdef _Pair item + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._value == value: + */ + if (unlikely(__pyx_v_self->__pyx_base._impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 732, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->__pyx_base._impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 732, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 732, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":733 + * cdef _Pair item + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * if item._value == value: + * return True + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":734 + * for i in self._impl._items: + * item = <_Pair>i + * if item._value == value: # <<<<<<<<<<<<<< + * return True + * return False + */ + __pyx_t_3 = PyObject_RichCompare(__pyx_v_item->_value, __pyx_v_value, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 734, __pyx_L1_error) + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 734, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_4) { + + /* "multidict/_multidict.pyx":735 + * item = <_Pair>i + * if item._value == value: + * return True # <<<<<<<<<<<<<< + * return False + * + */ + __pyx_r = 1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":734 + * for i in self._impl._items: + * item = <_Pair>i + * if item._value == value: # <<<<<<<<<<<<<< + * return True + * return False + */ + } + + /* "multidict/_multidict.pyx":732 + * def __contains__(self, value): + * cdef _Pair item + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._value == value: + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":736 + * if item._value == value: + * return True + * return False # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":730 + * cdef class _ValuesView(_ViewBase): + * + * def __contains__(self, value): # <<<<<<<<<<<<<< + * cdef _Pair item + * for i in self._impl._items: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict._ValuesView.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + 
__Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":738 + * return False + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return _ValuesIter.__new__(_ValuesIter, self._impl) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_11_ValuesView_3__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_11_ValuesView_3__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_11_ValuesView_2__iter__(((struct __pyx_obj_9multidict_10_multidict__ValuesView *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_11_ValuesView_2__iter__(struct __pyx_obj_9multidict_10_multidict__ValuesView *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("__iter__", 0); + + /* "multidict/_multidict.pyx":739 + * + * def __iter__(self): + * return _ValuesIter.__new__(_ValuesIter, self._impl) # <<<<<<<<<<<<<< + * + * def __repr__(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 739, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)__pyx_v_self->__pyx_base._impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self->__pyx_base._impl)); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_self->__pyx_base._impl)); + __pyx_t_2 = __pyx_tp_new_9multidict_10_multidict__ValuesIter(((PyTypeObject *)__pyx_ptype_9multidict_10_multidict__ValuesIter), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 739, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":738 + * return False + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return _ValuesIter.__new__(_ValuesIter, self._impl) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._ValuesView.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":741 + * return _ValuesIter.__new__(_ValuesIter, self._impl) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * cdef _Pair item + * lst = [] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_11_ValuesView_5__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_11_ValuesView_5__repr__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_11_ValuesView_4__repr__(((struct __pyx_obj_9multidict_10_multidict__ValuesView *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_11_ValuesView_4__repr__(struct __pyx_obj_9multidict_10_multidict__ValuesView *__pyx_v_self) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_lst = NULL; + PyObject *__pyx_v_i = NULL; + PyObject 
*__pyx_v_body = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + int __pyx_t_8; + __Pyx_RefNannySetupContext("__repr__", 0); + + /* "multidict/_multidict.pyx":743 + * def __repr__(self): + * cdef _Pair item + * lst = [] # <<<<<<<<<<<<<< + * for i in self._impl._items: + * item = <_Pair>i + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 743, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_lst = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":744 + * cdef _Pair item + * lst = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * lst.append("{!r}".format(item._value)) + */ + if (unlikely(__pyx_v_self->__pyx_base._impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 744, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->__pyx_base._impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 744, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 744, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":745 + * lst = [] + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * lst.append("{!r}".format(item._value)) + * body = ', '.join(lst) + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":746 + * for i in self._impl._items: + * item = <_Pair>i + * lst.append("{!r}".format(item._value)) # <<<<<<<<<<<<<< + * body = ', '.join(lst) + * return '{}({})'.format(self.__class__.__name__, body) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_r_2, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 746, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (!__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_item->_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 746, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_item->_value}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 746, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_item->_value}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 746, __pyx_L1_error) + 
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 746, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_INCREF(__pyx_v_item->_value); + __Pyx_GIVEREF(__pyx_v_item->_value); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_item->_value); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 746, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_7 = __Pyx_PyList_Append(__pyx_v_lst, __pyx_t_3); if (unlikely(__pyx_t_7 == -1)) __PYX_ERR(0, 746, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":744 + * cdef _Pair item + * lst = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * lst.append("{!r}".format(item._value)) + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":747 + * item = <_Pair>i + * lst.append("{!r}".format(item._value)) + * body = ', '.join(lst) # <<<<<<<<<<<<<< + * return '{}({})'.format(self.__class__.__name__, body) + * + */ + __pyx_t_1 = __Pyx_PyString_Join(__pyx_kp_s__3, __pyx_v_lst); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 747, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_body = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":748 + * lst.append("{!r}".format(item._value)) + * body = ', '.join(lst) + * return '{}({})'.format(self.__class__.__name__, body) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s__10, __pyx_n_s_format); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 748, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 748, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_name); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 748, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_6, __pyx_v_body}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 748, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_6, __pyx_v_body}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 748, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 748, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_8, __pyx_t_6); + __Pyx_INCREF(__pyx_v_body); + __Pyx_GIVEREF(__pyx_v_body); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_8, __pyx_v_body); + __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 748, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":741 + * return _ValuesIter.__new__(_ValuesIter, self._impl) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * cdef _Pair item + * lst = [] + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("multidict._multidict._ValuesView.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_lst); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_body); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":760 + * cdef unsigned long long _version + * + * def __cinit__(self, _Impl impl): # <<<<<<<<<<<<<< + * self._impl = impl + * self._current = 0 + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_9_KeysIter_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_9_KeysIter_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_impl,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_impl)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(0, 760, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 760, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("multidict._multidict._KeysIter.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_impl), __pyx_ptype_9multidict_10_multidict__Impl, 1, "impl", 0))) __PYX_ERR(0, 760, __pyx_L1_error) + __pyx_r = 
__pyx_pf_9multidict_10_multidict_9_KeysIter___cinit__(((struct __pyx_obj_9multidict_10_multidict__KeysIter *)__pyx_v_self), __pyx_v_impl); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_9_KeysIter___cinit__(struct __pyx_obj_9multidict_10_multidict__KeysIter *__pyx_v_self, struct __pyx_obj_9multidict_10_multidict__Impl *__pyx_v_impl) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + unsigned PY_LONG_LONG __pyx_t_3; + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "multidict/_multidict.pyx":761 + * + * def __cinit__(self, _Impl impl): + * self._impl = impl # <<<<<<<<<<<<<< + * self._current = 0 + * self._len = len(self._impl._items) + */ + __Pyx_INCREF(((PyObject *)__pyx_v_impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_impl)); + __Pyx_GOTREF(__pyx_v_self->_impl); + __Pyx_DECREF(((PyObject *)__pyx_v_self->_impl)); + __pyx_v_self->_impl = __pyx_v_impl; + + /* "multidict/_multidict.pyx":762 + * def __cinit__(self, _Impl impl): + * self._impl = impl + * self._current = 0 # <<<<<<<<<<<<<< + * self._len = len(self._impl._items) + * self._version = impl._version + */ + __pyx_v_self->_current = 0; + + /* "multidict/_multidict.pyx":763 + * self._impl = impl + * self._current = 0 + * self._len = len(self._impl._items) # <<<<<<<<<<<<<< + * self._version = impl._version + * + */ + __pyx_t_1 = __pyx_v_self->_impl->_items; + __Pyx_INCREF(__pyx_t_1); + if (unlikely(__pyx_t_1 == Py_None)) { + PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); + __PYX_ERR(0, 763, __pyx_L1_error) + } + __pyx_t_2 = PyList_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 763, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_self->_len = __pyx_t_2; + + /* "multidict/_multidict.pyx":764 + * self._current = 0 + * self._len = len(self._impl._items) + * self._version = impl._version # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __pyx_t_3 = __pyx_v_impl->_version; + __pyx_v_self->_version = __pyx_t_3; + + /* "multidict/_multidict.pyx":760 + * cdef unsigned long long _version + * + * def __cinit__(self, _Impl impl): # <<<<<<<<<<<<<< + * self._impl = impl + * self._current = 0 + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("multidict._multidict._KeysIter.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":766 + * self._version = impl._version + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return self + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysIter_3__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysIter_3__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9_KeysIter_2__iter__(((struct __pyx_obj_9multidict_10_multidict__KeysIter *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysIter_2__iter__(struct __pyx_obj_9multidict_10_multidict__KeysIter *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__iter__", 0); + + /* "multidict/_multidict.pyx":767 + * + * def __iter__(self): + * return self # <<<<<<<<<<<<<< + * + * def __next__(self): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __pyx_r = ((PyObject *)__pyx_v_self); + goto __pyx_L0; + + /* "multidict/_multidict.pyx":766 + * self._version = impl._version + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return self + * + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":769 + * return self + * + * def __next__(self): # <<<<<<<<<<<<<< + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysIter_5__next__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysIter_5__next__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__next__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9_KeysIter_4__next__(((struct __pyx_obj_9multidict_10_multidict__KeysIter *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysIter_4__next__(struct __pyx_obj_9multidict_10_multidict__KeysIter *__pyx_v_self) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + __Pyx_RefNannySetupContext("__next__", 0); + + /* "multidict/_multidict.pyx":770 + * + * def __next__(self): + * if self._version != self._impl._version: # <<<<<<<<<<<<<< + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + */ + __pyx_t_1 = ((__pyx_v_self->_version != __pyx_v_self->_impl->_version) != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":771 + * def __next__(self): + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") # <<<<<<<<<<<<<< + * if self._current == self._len: + * raise StopIteration + */ + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 771, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __PYX_ERR(0, 771, __pyx_L1_error) + + /* "multidict/_multidict.pyx":770 + * + * def __next__(self): + * if self._version != self._impl._version: # <<<<<<<<<<<<<< + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + */ + } + + /* "multidict/_multidict.pyx":772 + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: # <<<<<<<<<<<<<< + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] + */ + __pyx_t_1 = ((__pyx_v_self->_current == __pyx_v_self->_len) != 0); + if (__pyx_t_1) { + + /* "multidict/_multidict.pyx":773 + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: + * raise StopIteration # <<<<<<<<<<<<<< + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 + */ + __Pyx_Raise(__pyx_builtin_StopIteration, 0, 0, 0); + __PYX_ERR(0, 773, __pyx_L1_error) + + /* 
"multidict/_multidict.pyx":772 + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + * if self._current == self._len: # <<<<<<<<<<<<<< + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] + */ + } + + /* "multidict/_multidict.pyx":774 + * if self._current == self._len: + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] # <<<<<<<<<<<<<< + * self._current += 1 + * return item._key + */ + if (unlikely(__pyx_v_self->_impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 774, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_GetItemInt_List(__pyx_v_self->_impl->_items, __pyx_v_self->_current, int, 1, __Pyx_PyInt_From_int, 1, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 774, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __pyx_t_2; + __Pyx_INCREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_item = ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":775 + * raise StopIteration + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 # <<<<<<<<<<<<<< + * return item._key + * + */ + __pyx_v_self->_current = (__pyx_v_self->_current + 1); + + /* "multidict/_multidict.pyx":776 + * item = <_Pair>self._impl._items[self._current] + * self._current += 1 + * return item._key # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_item->_key); + __pyx_r = __pyx_v_item->_key; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":769 + * return self + * + * def __next__(self): # <<<<<<<<<<<<<< + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict._KeysIter.__next__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":781 + * cdef class _KeysView(_ViewBaseSet): + * + * def isdisjoint(self, other): # <<<<<<<<<<<<<< + * 'Return True if two sets have a null intersection.' 
+ * cdef _Pair item + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysView_1isdisjoint(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ +static char __pyx_doc_9multidict_10_multidict_9_KeysView_isdisjoint[] = "Return True if two sets have a null intersection."; +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysView_1isdisjoint(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("isdisjoint (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9_KeysView_isdisjoint(((struct __pyx_obj_9multidict_10_multidict__KeysView *)__pyx_v_self), ((PyObject *)__pyx_v_other)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysView_isdisjoint(struct __pyx_obj_9multidict_10_multidict__KeysView *__pyx_v_self, PyObject *__pyx_v_other) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + int __pyx_t_5; + __Pyx_RefNannySetupContext("isdisjoint", 0); + + /* "multidict/_multidict.pyx":784 + * 'Return True if two sets have a null intersection.' + * cdef _Pair item + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._key in other: + */ + if (unlikely(__pyx_v_self->__pyx_base.__pyx_base._impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 784, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->__pyx_base.__pyx_base._impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 784, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 784, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":785 + * cdef _Pair item + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * if item._key in other: + * return False + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":786 + * for i in self._impl._items: + * item = <_Pair>i + * if item._key in other: # <<<<<<<<<<<<<< + * return False + * return True + */ + __pyx_t_4 = (__Pyx_PySequence_ContainsTF(__pyx_v_item->_key, __pyx_v_other, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 786, __pyx_L1_error) + __pyx_t_5 = (__pyx_t_4 != 0); + if (__pyx_t_5) { + + /* "multidict/_multidict.pyx":787 + * item = <_Pair>i + * if item._key in other: + * return False # <<<<<<<<<<<<<< + * return True + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_False); + __pyx_r = Py_False; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":786 + * for i in self._impl._items: + * item = <_Pair>i + * if item._key in other: # <<<<<<<<<<<<<< + * return False + * return True + */ + } + + /* "multidict/_multidict.pyx":784 + * 'Return True if two sets have a null 
intersection.' + * cdef _Pair item + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._key in other: + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":788 + * if item._key in other: + * return False + * return True # <<<<<<<<<<<<<< + * + * def __contains__(self, value): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_True); + __pyx_r = Py_True; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":781 + * cdef class _KeysView(_ViewBaseSet): + * + * def isdisjoint(self, other): # <<<<<<<<<<<<<< + * 'Return True if two sets have a null intersection.' + * cdef _Pair item + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict._KeysView.isdisjoint", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":790 + * return True + * + * def __contains__(self, value): # <<<<<<<<<<<<<< + * cdef _Pair item + * for i in self._impl._items: + */ + +/* Python wrapper */ +static int __pyx_pw_9multidict_10_multidict_9_KeysView_3__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_9multidict_10_multidict_9_KeysView_3__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9_KeysView_2__contains__(((struct __pyx_obj_9multidict_10_multidict__KeysView *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9multidict_10_multidict_9_KeysView_2__contains__(struct __pyx_obj_9multidict_10_multidict__KeysView *__pyx_v_self, PyObject *__pyx_v_value) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_i = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + __Pyx_RefNannySetupContext("__contains__", 0); + + /* "multidict/_multidict.pyx":792 + * def __contains__(self, value): + * cdef _Pair item + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._key == value: + */ + if (unlikely(__pyx_v_self->__pyx_base.__pyx_base._impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 792, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->__pyx_base.__pyx_base._impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 792, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 792, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":793 + * cdef _Pair item + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * if item._key == value: + * return True + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + __Pyx_XDECREF_SET(__pyx_v_item, 
((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":794 + * for i in self._impl._items: + * item = <_Pair>i + * if item._key == value: # <<<<<<<<<<<<<< + * return True + * return False + */ + __pyx_t_4 = (__Pyx_PyString_Equals(__pyx_v_item->_key, __pyx_v_value, Py_EQ)); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 794, __pyx_L1_error) + if (__pyx_t_4) { + + /* "multidict/_multidict.pyx":795 + * item = <_Pair>i + * if item._key == value: + * return True # <<<<<<<<<<<<<< + * return False + * + */ + __pyx_r = 1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":794 + * for i in self._impl._items: + * item = <_Pair>i + * if item._key == value: # <<<<<<<<<<<<<< + * return True + * return False + */ + } + + /* "multidict/_multidict.pyx":792 + * def __contains__(self, value): + * cdef _Pair item + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * if item._key == value: + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":796 + * if item._key == value: + * return True + * return False # <<<<<<<<<<<<<< + * + * def __iter__(self): + */ + __pyx_r = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":790 + * return True + * + * def __contains__(self, value): # <<<<<<<<<<<<<< + * cdef _Pair item + * for i in self._impl._items: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("multidict._multidict._KeysView.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":798 + * return False + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return _KeysIter.__new__(_KeysIter, self._impl) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysView_5__iter__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysView_5__iter__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9_KeysView_4__iter__(((struct __pyx_obj_9multidict_10_multidict__KeysView *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysView_4__iter__(struct __pyx_obj_9multidict_10_multidict__KeysView *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("__iter__", 0); + + /* "multidict/_multidict.pyx":799 + * + * def __iter__(self): + * return _KeysIter.__new__(_KeysIter, self._impl) # <<<<<<<<<<<<<< + * + * def __repr__(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 799, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)__pyx_v_self->__pyx_base.__pyx_base._impl)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self->__pyx_base.__pyx_base._impl)); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_self->__pyx_base.__pyx_base._impl)); + __pyx_t_2 = __pyx_tp_new_9multidict_10_multidict__KeysIter(((PyTypeObject *)__pyx_ptype_9multidict_10_multidict__KeysIter), __pyx_t_1, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 799, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":798 + * return False + * + * def __iter__(self): # <<<<<<<<<<<<<< + * return _KeysIter.__new__(_KeysIter, self._impl) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("multidict._multidict._KeysView.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "multidict/_multidict.pyx":801 + * return _KeysIter.__new__(_KeysIter, self._impl) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * cdef _Pair item + * lst = [] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysView_7__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9multidict_10_multidict_9_KeysView_7__repr__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_r = __pyx_pf_9multidict_10_multidict_9_KeysView_6__repr__(((struct __pyx_obj_9multidict_10_multidict__KeysView *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9multidict_10_multidict_9_KeysView_6__repr__(struct __pyx_obj_9multidict_10_multidict__KeysView *__pyx_v_self) { + struct __pyx_obj_9multidict_10_multidict__Pair *__pyx_v_item = 0; + PyObject *__pyx_v_lst = NULL; + PyObject *__pyx_v_i = NULL; + PyObject *__pyx_v_body = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + int __pyx_t_8; + __Pyx_RefNannySetupContext("__repr__", 0); + + /* "multidict/_multidict.pyx":803 + * def __repr__(self): + * cdef _Pair item + * lst = [] # <<<<<<<<<<<<<< + * for i in self._impl._items: + * item = <_Pair>i + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 803, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_lst = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":804 + * cdef _Pair item + * lst = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * lst.append("{!r}".format(item._key)) + */ + if (unlikely(__pyx_v_self->__pyx_base.__pyx_base._impl->_items == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 804, __pyx_L1_error) + } + __pyx_t_1 = __pyx_v_self->__pyx_base.__pyx_base._impl->_items; __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_3 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(0, 804, __pyx_L1_error) + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 804, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":805 + * lst = [] + * for i in self._impl._items: + * item = <_Pair>i # <<<<<<<<<<<<<< + * lst.append("{!r}".format(item._key)) + * body = ', '.join(lst) + */ + __pyx_t_3 = __pyx_v_i; + __Pyx_INCREF(__pyx_t_3); + 
__Pyx_XDECREF_SET(__pyx_v_item, ((struct __pyx_obj_9multidict_10_multidict__Pair *)__pyx_t_3)); + __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":806 + * for i in self._impl._items: + * item = <_Pair>i + * lst.append("{!r}".format(item._key)) # <<<<<<<<<<<<<< + * body = ', '.join(lst) + * return '{}({})'.format(self.__class__.__name__, body) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_r_2, __pyx_n_s_format); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 806, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (!__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_item->_key); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 806, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_item->_key}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 806, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_item->_key}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 806, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 806, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_INCREF(__pyx_v_item->_key); + __Pyx_GIVEREF(__pyx_v_item->_key); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_item->_key); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 806, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_7 = __Pyx_PyList_Append(__pyx_v_lst, __pyx_t_3); if (unlikely(__pyx_t_7 == -1)) __PYX_ERR(0, 806, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "multidict/_multidict.pyx":804 + * cdef _Pair item + * lst = [] + * for i in self._impl._items: # <<<<<<<<<<<<<< + * item = <_Pair>i + * lst.append("{!r}".format(item._key)) + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":807 + * item = <_Pair>i + * lst.append("{!r}".format(item._key)) + * body = ', '.join(lst) # <<<<<<<<<<<<<< + * return '{}({})'.format(self.__class__.__name__, body) + * + */ + __pyx_t_1 = __Pyx_PyString_Join(__pyx_kp_s__3, __pyx_v_lst); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 807, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_body = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":808 + * lst.append("{!r}".format(item._key)) + * body = ', '.join(lst) + * return '{}({})'.format(self.__class__.__name__, body) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s__10, __pyx_n_s_format); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 808, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject 
*)__pyx_v_self), __pyx_n_s_class); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 808, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_name); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 808, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_6, __pyx_v_body}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 808, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_6, __pyx_v_body}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 808, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 808, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_8, __pyx_t_6); + __Pyx_INCREF(__pyx_v_body); + __Pyx_GIVEREF(__pyx_v_body); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_8, __pyx_v_body); + __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 808, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "multidict/_multidict.pyx":801 + * return _KeysIter.__new__(_KeysIter, self._impl) + * + * def __repr__(self): # <<<<<<<<<<<<<< + * cdef _Pair item + * lst = [] + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("multidict._multidict._KeysView.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_item); + __Pyx_XDECREF(__pyx_v_lst); + __Pyx_XDECREF(__pyx_v_i); + __Pyx_XDECREF(__pyx_v_body); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_tp_new_9multidict_10_multidict__Pair(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict__Pair *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict__Pair *)o); + p->_identity = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_key = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_value = Py_None; 
Py_INCREF(Py_None); + if (unlikely(__pyx_pw_9multidict_10_multidict_5_Pair_1__cinit__(o, a, k) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_9multidict_10_multidict__Pair(PyObject *o) { + struct __pyx_obj_9multidict_10_multidict__Pair *p = (struct __pyx_obj_9multidict_10_multidict__Pair *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_identity); + Py_CLEAR(p->_key); + Py_CLEAR(p->_value); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_9multidict_10_multidict__Pair(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9multidict_10_multidict__Pair *p = (struct __pyx_obj_9multidict_10_multidict__Pair *)o; + if (p->_value) { + e = (*v)(p->_value, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9multidict_10_multidict__Pair(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9multidict_10_multidict__Pair *p = (struct __pyx_obj_9multidict_10_multidict__Pair *)o; + tmp = ((PyObject*)p->_value); + p->_value = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__Pair[] = { + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__Pair = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._Pair", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__Pair), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__Pair, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__Pair, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__Pair, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__Pair, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__Pair, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_9multidict_10_multidict__Impl __pyx_vtable_9multidict_10_multidict__Impl; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__Impl(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_9multidict_10_multidict__Impl *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict__Impl *)o); + p->__pyx_vtab = 
__pyx_vtabptr_9multidict_10_multidict__Impl; + p->_items = ((PyObject*)Py_None); Py_INCREF(Py_None); + if (unlikely(__pyx_pw_9multidict_10_multidict_5_Impl_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_9multidict_10_multidict__Impl(PyObject *o) { + struct __pyx_obj_9multidict_10_multidict__Impl *p = (struct __pyx_obj_9multidict_10_multidict__Impl *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_items); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_9multidict_10_multidict__Impl(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9multidict_10_multidict__Impl *p = (struct __pyx_obj_9multidict_10_multidict__Impl *)o; + if (p->_items) { + e = (*v)(p->_items, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9multidict_10_multidict__Impl(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9multidict_10_multidict__Impl *p = (struct __pyx_obj_9multidict_10_multidict__Impl *)o; + tmp = ((PyObject*)p->_items); + p->_items = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__Impl[] = { + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__Impl = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._Impl", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__Impl), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__Impl, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__Impl, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__Impl, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__Impl, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__Impl, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_9multidict_10_multidict__Base __pyx_vtable_9multidict_10_multidict__Base; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__Base(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_9multidict_10_multidict__Base *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct 
__pyx_obj_9multidict_10_multidict__Base *)o); + p->__pyx_vtab = __pyx_vtabptr_9multidict_10_multidict__Base; + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_9multidict_10_multidict__Base(PyObject *o) { + struct __pyx_obj_9multidict_10_multidict__Base *p = (struct __pyx_obj_9multidict_10_multidict__Base *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_impl); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_9multidict_10_multidict__Base(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9multidict_10_multidict__Base *p = (struct __pyx_obj_9multidict_10_multidict__Base *)o; + if (p->_impl) { + e = (*v)(((PyObject*)p->_impl), a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9multidict_10_multidict__Base(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9multidict_10_multidict__Base *p = (struct __pyx_obj_9multidict_10_multidict__Base *)o; + tmp = ((PyObject*)p->_impl); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} +static PyObject *__pyx_sq_item_9multidict_10_multidict__Base(PyObject *o, Py_ssize_t i) { + PyObject *r; + PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; + r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); + Py_DECREF(x); + return r; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__Base[] = { + {"getall", (PyCFunction)__pyx_pw_9multidict_10_multidict_5_Base_1getall, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_5_Base_getall}, + {"getone", (PyCFunction)__pyx_pw_9multidict_10_multidict_5_Base_3getone, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_5_Base_2getone}, + {"get", (PyCFunction)__pyx_pw_9multidict_10_multidict_5_Base_7get, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_5_Base_6get}, + {"keys", (PyCFunction)__pyx_pw_9multidict_10_multidict_5_Base_15keys, METH_NOARGS, __pyx_doc_9multidict_10_multidict_5_Base_14keys}, + {"items", (PyCFunction)__pyx_pw_9multidict_10_multidict_5_Base_17items, METH_NOARGS, __pyx_doc_9multidict_10_multidict_5_Base_16items}, + {"values", (PyCFunction)__pyx_pw_9multidict_10_multidict_5_Base_19values, METH_NOARGS, __pyx_doc_9multidict_10_multidict_5_Base_18values}, + {0, 0, 0, 0} +}; + +static PySequenceMethods __pyx_tp_as_sequence__Base = { + __pyx_pw_9multidict_10_multidict_5_Base_13__len__, /*sq_length*/ + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + __pyx_sq_item_9multidict_10_multidict__Base, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + __pyx_pw_9multidict_10_multidict_5_Base_9__contains__, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyMappingMethods __pyx_tp_as_mapping__Base = { + __pyx_pw_9multidict_10_multidict_5_Base_13__len__, /*mp_length*/ + __pyx_pw_9multidict_10_multidict_5_Base_5__getitem__, /*mp_subscript*/ + 0, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__Base = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._Base", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__Base), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__Base, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, 
/*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_9multidict_10_multidict_5_Base_21__repr__, /*tp_repr*/ + 0, /*tp_as_number*/ + &__pyx_tp_as_sequence__Base, /*tp_as_sequence*/ + &__pyx_tp_as_mapping__Base, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__Base, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__Base, /*tp_clear*/ + __pyx_pw_9multidict_10_multidict_5_Base_23__richcmp__, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_9multidict_10_multidict_5_Base_11__iter__, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__Base, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__Base, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_9multidict_10_multidict_MultiDictProxy __pyx_vtable_9multidict_10_multidict_MultiDictProxy; + +static PyObject *__pyx_tp_new_9multidict_10_multidict_MultiDictProxy(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *p; + PyObject *o = __pyx_tp_new_9multidict_10_multidict__Base(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict_MultiDictProxy *)o); + p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_9multidict_10_multidict__Base*)__pyx_vtabptr_9multidict_10_multidict_MultiDictProxy; + return o; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict_MultiDictProxy[] = { + {"__reduce__", (PyCFunction)__pyx_pw_9multidict_10_multidict_14MultiDictProxy_3__reduce__, METH_NOARGS, 0}, + {"copy", (PyCFunction)__pyx_pw_9multidict_10_multidict_14MultiDictProxy_5copy, METH_NOARGS, __pyx_doc_9multidict_10_multidict_14MultiDictProxy_4copy}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict_MultiDictProxy = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict.MultiDictProxy", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict_MultiDictProxy), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__Base, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_21__repr__, /*tp_repr*/ + #else + 0, /*tp_repr*/ + #endif + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__Base, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__Base, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 
#if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_11__iter__, /*tp_iter*/ + #else + 0, /*tp_iter*/ + #endif + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict_MultiDictProxy, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_9multidict_10_multidict_14MultiDictProxy_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict_MultiDictProxy, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_9multidict_10_multidict_CIMultiDictProxy __pyx_vtable_9multidict_10_multidict_CIMultiDictProxy; + +static PyObject *__pyx_tp_new_9multidict_10_multidict_CIMultiDictProxy(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict_CIMultiDictProxy *p; + PyObject *o = __pyx_tp_new_9multidict_10_multidict_MultiDictProxy(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict_CIMultiDictProxy *)o); + p->__pyx_base.__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_9multidict_10_multidict__Base*)__pyx_vtabptr_9multidict_10_multidict_CIMultiDictProxy; + return o; +} + +static PyTypeObject __pyx_type_9multidict_10_multidict_CIMultiDictProxy = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict.CIMultiDictProxy", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict_CIMultiDictProxy), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__Base, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_21__repr__, /*tp_repr*/ + #else + 0, /*tp_repr*/ + #endif + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__Base, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__Base, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_11__iter__, /*tp_iter*/ + #else + 0, /*tp_iter*/ + #endif + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_14MultiDictProxy_1__init__, /*tp_init*/ + #else + 0, /*tp_init*/ + #endif + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict_CIMultiDictProxy, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_9multidict_10_multidict_MultiDict __pyx_vtable_9multidict_10_multidict_MultiDict; + +static PyObject 
*__pyx_tp_new_9multidict_10_multidict_MultiDict(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict_MultiDict *p; + PyObject *o = __pyx_tp_new_9multidict_10_multidict__Base(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict_MultiDict *)o); + p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_9multidict_10_multidict__Base*)__pyx_vtabptr_9multidict_10_multidict_MultiDict; + return o; +} + +static int __pyx_mp_ass_subscript_9multidict_10_multidict_MultiDict(PyObject *o, PyObject *i, PyObject *v) { + if (v) { + return __pyx_pw_9multidict_10_multidict_9MultiDict_13__setitem__(o, i, v); + } + else { + return __pyx_pw_9multidict_10_multidict_9MultiDict_15__delitem__(o, i); + } +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict_MultiDict[] = { + {"__reduce__", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_3__reduce__, METH_NOARGS, 0}, + {"add", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_5add, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_9MultiDict_4add}, + {"copy", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_7copy, METH_NOARGS, __pyx_doc_9multidict_10_multidict_9MultiDict_6copy}, + {"extend", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_9extend, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_9MultiDict_8extend}, + {"clear", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_11clear, METH_NOARGS, __pyx_doc_9multidict_10_multidict_9MultiDict_10clear}, + {"setdefault", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_17setdefault, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_9MultiDict_16setdefault}, + {"popone", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_19popone, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_9MultiDict_18popone}, + {"popall", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_21popall, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_9MultiDict_20popall}, + {"popitem", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_23popitem, METH_NOARGS, __pyx_doc_9multidict_10_multidict_9MultiDict_22popitem}, + {"update", (PyCFunction)__pyx_pw_9multidict_10_multidict_9MultiDict_25update, METH_VARARGS|METH_KEYWORDS, __pyx_doc_9multidict_10_multidict_9MultiDict_24update}, + {0, 0, 0, 0} +}; + +static PyMappingMethods __pyx_tp_as_mapping_MultiDict = { + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_13__len__, /*mp_length*/ + #else + 0, /*mp_length*/ + #endif + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_5__getitem__, /*mp_subscript*/ + #else + 0, /*mp_subscript*/ + #endif + __pyx_mp_ass_subscript_9multidict_10_multidict_MultiDict, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict_MultiDict = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict.MultiDict", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict_MultiDict), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__Base, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_21__repr__, /*tp_repr*/ + #else + 0, /*tp_repr*/ + #endif + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + &__pyx_tp_as_mapping_MultiDict, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 
0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + "An ordered dictionary that can have multiple values for each key.", /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__Base, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__Base, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_11__iter__, /*tp_iter*/ + #else + 0, /*tp_iter*/ + #endif + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict_MultiDict, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_9multidict_10_multidict_9MultiDict_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict_MultiDict, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_9multidict_10_multidict_CIMultiDict __pyx_vtable_9multidict_10_multidict_CIMultiDict; + +static PyObject *__pyx_tp_new_9multidict_10_multidict_CIMultiDict(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict_CIMultiDict *p; + PyObject *o = __pyx_tp_new_9multidict_10_multidict_MultiDict(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict_CIMultiDict *)o); + p->__pyx_base.__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_9multidict_10_multidict__Base*)__pyx_vtabptr_9multidict_10_multidict_CIMultiDict; + return o; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict_CIMultiDict[] = { + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict_CIMultiDict = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict.CIMultiDict", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict_CIMultiDict), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__Base, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_21__repr__, /*tp_repr*/ + #else + 0, /*tp_repr*/ + #endif + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + "An ordered dictionary that can have multiple values for each key.", /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__Base, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__Base, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_5_Base_11__iter__, /*tp_iter*/ + #else + 0, /*tp_iter*/ + #endif + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict_CIMultiDict, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 
__pyx_pw_9multidict_10_multidict_11CIMultiDict_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict_CIMultiDict, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__ViewBase(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict__ViewBase *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict__ViewBase *)o); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + if (unlikely(__pyx_pw_9multidict_10_multidict_9_ViewBase_1__cinit__(o, a, k) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_9multidict_10_multidict__ViewBase(PyObject *o) { + struct __pyx_obj_9multidict_10_multidict__ViewBase *p = (struct __pyx_obj_9multidict_10_multidict__ViewBase *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_impl); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_9multidict_10_multidict__ViewBase(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9multidict_10_multidict__ViewBase *p = (struct __pyx_obj_9multidict_10_multidict__ViewBase *)o; + if (p->_impl) { + e = (*v)(((PyObject*)p->_impl), a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9multidict_10_multidict__ViewBase(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9multidict_10_multidict__ViewBase *p = (struct __pyx_obj_9multidict_10_multidict__ViewBase *)o; + tmp = ((PyObject*)p->_impl); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__ViewBase[] = { + {0, 0, 0, 0} +}; + +static PySequenceMethods __pyx_tp_as_sequence__ViewBase = { + __pyx_pw_9multidict_10_multidict_9_ViewBase_3__len__, /*sq_length*/ + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + 0, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + 0, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyMappingMethods __pyx_tp_as_mapping__ViewBase = { + __pyx_pw_9multidict_10_multidict_9_ViewBase_3__len__, /*mp_length*/ + 0, /*mp_subscript*/ + 0, /*mp_ass_subscript*/ +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__ViewBase = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._ViewBase", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__ViewBase), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__ViewBase, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + &__pyx_tp_as_sequence__ViewBase, /*tp_as_sequence*/ + &__pyx_tp_as_mapping__ViewBase, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, 
/*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__ViewBase, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__ViewBase, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__ViewBase, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__ViewBase, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__ViewBaseSet(PyTypeObject *t, PyObject *a, PyObject *k) { + PyObject *o = __pyx_tp_new_9multidict_10_multidict__ViewBase(t, a, k); + if (unlikely(!o)) return 0; + return o; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__ViewBaseSet[] = { + {0, 0, 0, 0} +}; + +static PyNumberMethods __pyx_tp_as_number__ViewBaseSet = { + 0, /*nb_add*/ + __pyx_pw_9multidict_10_multidict_12_ViewBaseSet_7__sub__, /*nb_subtract*/ + 0, /*nb_multiply*/ + #if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + 0, /*nb_divide*/ + #endif + 0, /*nb_remainder*/ + 0, /*nb_divmod*/ + 0, /*nb_power*/ + 0, /*nb_negative*/ + 0, /*nb_positive*/ + 0, /*nb_absolute*/ + 0, /*nb_nonzero*/ + 0, /*nb_invert*/ + 0, /*nb_lshift*/ + 0, /*nb_rshift*/ + __pyx_pw_9multidict_10_multidict_12_ViewBaseSet_3__and__, /*nb_and*/ + __pyx_pw_9multidict_10_multidict_12_ViewBaseSet_9__xor__, /*nb_xor*/ + __pyx_pw_9multidict_10_multidict_12_ViewBaseSet_5__or__, /*nb_or*/ + #if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + 0, /*nb_coerce*/ + #endif + 0, /*nb_int*/ + #if PY_MAJOR_VERSION < 3 + 0, /*nb_long*/ + #else + 0, /*reserved*/ + #endif + 0, /*nb_float*/ + #if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + 0, /*nb_oct*/ + #endif + #if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + 0, /*nb_hex*/ + #endif + 0, /*nb_inplace_add*/ + 0, /*nb_inplace_subtract*/ + 0, /*nb_inplace_multiply*/ + #if PY_MAJOR_VERSION < 3 || CYTHON_COMPILING_IN_PYPY + 0, /*nb_inplace_divide*/ + #endif + 0, /*nb_inplace_remainder*/ + 0, /*nb_inplace_power*/ + 0, /*nb_inplace_lshift*/ + 0, /*nb_inplace_rshift*/ + 0, /*nb_inplace_and*/ + 0, /*nb_inplace_xor*/ + 0, /*nb_inplace_or*/ + 0, /*nb_floor_divide*/ + 0, /*nb_true_divide*/ + 0, /*nb_inplace_floor_divide*/ + 0, /*nb_inplace_true_divide*/ + 0, /*nb_index*/ + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_matrix_multiply*/ + #endif + #if PY_VERSION_HEX >= 0x03050000 + 0, /*nb_inplace_matrix_multiply*/ + #endif +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__ViewBaseSet = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._ViewBaseSet", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__ViewBaseSet), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__ViewBase, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + &__pyx_tp_as_number__ViewBaseSet, /*tp_as_number*/ + 0, 
/*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__ViewBase, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__ViewBase, /*tp_clear*/ + __pyx_pw_9multidict_10_multidict_12_ViewBaseSet_1__richcmp__, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__ViewBaseSet, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__ViewBaseSet, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__ItemsIter(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict__ItemsIter *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict__ItemsIter *)o); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + if (unlikely(__pyx_pw_9multidict_10_multidict_10_ItemsIter_1__cinit__(o, a, k) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_9multidict_10_multidict__ItemsIter(PyObject *o) { + struct __pyx_obj_9multidict_10_multidict__ItemsIter *p = (struct __pyx_obj_9multidict_10_multidict__ItemsIter *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_impl); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_9multidict_10_multidict__ItemsIter(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9multidict_10_multidict__ItemsIter *p = (struct __pyx_obj_9multidict_10_multidict__ItemsIter *)o; + if (p->_impl) { + e = (*v)(((PyObject*)p->_impl), a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9multidict_10_multidict__ItemsIter(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9multidict_10_multidict__ItemsIter *p = (struct __pyx_obj_9multidict_10_multidict__ItemsIter *)o; + tmp = ((PyObject*)p->_impl); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__ItemsIter[] = { + {"__next__", (PyCFunction)__pyx_pw_9multidict_10_multidict_10_ItemsIter_5__next__, METH_NOARGS|METH_COEXIST, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__ItemsIter = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._ItemsIter", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__ItemsIter), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__ItemsIter, /*tp_dealloc*/ + 0, /*tp_print*/ + 
0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__ItemsIter, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__ItemsIter, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_9multidict_10_multidict_10_ItemsIter_3__iter__, /*tp_iter*/ + __pyx_pw_9multidict_10_multidict_10_ItemsIter_5__next__, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__ItemsIter, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__ItemsIter, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__ItemsView(PyTypeObject *t, PyObject *a, PyObject *k) { + PyObject *o = __pyx_tp_new_9multidict_10_multidict__ViewBaseSet(t, a, k); + if (unlikely(!o)) return 0; + return o; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__ItemsView[] = { + {"isdisjoint", (PyCFunction)__pyx_pw_9multidict_10_multidict_10_ItemsView_1isdisjoint, METH_O, __pyx_doc_9multidict_10_multidict_10_ItemsView_isdisjoint}, + {0, 0, 0, 0} +}; + +static PySequenceMethods __pyx_tp_as_sequence__ItemsView = { + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_9_ViewBase_3__len__, /*sq_length*/ + #else + 0, /*sq_length*/ + #endif + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + 0, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + __pyx_pw_9multidict_10_multidict_10_ItemsView_3__contains__, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__ItemsView = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._ItemsView", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__ItemsView), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__ViewBase, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_9multidict_10_multidict_10_ItemsView_7__repr__, /*tp_repr*/ + 0, /*tp_as_number*/ + &__pyx_tp_as_sequence__ItemsView, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__ViewBase, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__ViewBase, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_9multidict_10_multidict_10_ItemsView_5__iter__, /*tp_iter*/ + 0, 
/*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__ItemsView, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__ItemsView, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__ValuesIter(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict__ValuesIter *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict__ValuesIter *)o); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + if (unlikely(__pyx_pw_9multidict_10_multidict_11_ValuesIter_1__cinit__(o, a, k) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_9multidict_10_multidict__ValuesIter(PyObject *o) { + struct __pyx_obj_9multidict_10_multidict__ValuesIter *p = (struct __pyx_obj_9multidict_10_multidict__ValuesIter *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_impl); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_9multidict_10_multidict__ValuesIter(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9multidict_10_multidict__ValuesIter *p = (struct __pyx_obj_9multidict_10_multidict__ValuesIter *)o; + if (p->_impl) { + e = (*v)(((PyObject*)p->_impl), a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9multidict_10_multidict__ValuesIter(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9multidict_10_multidict__ValuesIter *p = (struct __pyx_obj_9multidict_10_multidict__ValuesIter *)o; + tmp = ((PyObject*)p->_impl); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__ValuesIter[] = { + {"__next__", (PyCFunction)__pyx_pw_9multidict_10_multidict_11_ValuesIter_5__next__, METH_NOARGS|METH_COEXIST, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__ValuesIter = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._ValuesIter", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__ValuesIter), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__ValuesIter, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__ValuesIter, 
/*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__ValuesIter, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_9multidict_10_multidict_11_ValuesIter_3__iter__, /*tp_iter*/ + __pyx_pw_9multidict_10_multidict_11_ValuesIter_5__next__, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__ValuesIter, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__ValuesIter, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__ValuesView(PyTypeObject *t, PyObject *a, PyObject *k) { + PyObject *o = __pyx_tp_new_9multidict_10_multidict__ViewBase(t, a, k); + if (unlikely(!o)) return 0; + return o; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__ValuesView[] = { + {0, 0, 0, 0} +}; + +static PySequenceMethods __pyx_tp_as_sequence__ValuesView = { + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_9_ViewBase_3__len__, /*sq_length*/ + #else + 0, /*sq_length*/ + #endif + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + 0, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + __pyx_pw_9multidict_10_multidict_11_ValuesView_1__contains__, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__ValuesView = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._ValuesView", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__ValuesView), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__ViewBase, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_9multidict_10_multidict_11_ValuesView_5__repr__, /*tp_repr*/ + 0, /*tp_as_number*/ + &__pyx_tp_as_sequence__ValuesView, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__ViewBase, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__ViewBase, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_9multidict_10_multidict_11_ValuesView_3__iter__, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__ValuesView, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__ValuesView, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__KeysIter(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9multidict_10_multidict__KeysIter 
*p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9multidict_10_multidict__KeysIter *)o); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + if (unlikely(__pyx_pw_9multidict_10_multidict_9_KeysIter_1__cinit__(o, a, k) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_9multidict_10_multidict__KeysIter(PyObject *o) { + struct __pyx_obj_9multidict_10_multidict__KeysIter *p = (struct __pyx_obj_9multidict_10_multidict__KeysIter *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_impl); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_9multidict_10_multidict__KeysIter(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9multidict_10_multidict__KeysIter *p = (struct __pyx_obj_9multidict_10_multidict__KeysIter *)o; + if (p->_impl) { + e = (*v)(((PyObject*)p->_impl), a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9multidict_10_multidict__KeysIter(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9multidict_10_multidict__KeysIter *p = (struct __pyx_obj_9multidict_10_multidict__KeysIter *)o; + tmp = ((PyObject*)p->_impl); + p->_impl = ((struct __pyx_obj_9multidict_10_multidict__Impl *)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__KeysIter[] = { + {"__next__", (PyCFunction)__pyx_pw_9multidict_10_multidict_9_KeysIter_5__next__, METH_NOARGS|METH_COEXIST, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__KeysIter = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._KeysIter", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__KeysIter), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__KeysIter, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__KeysIter, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__KeysIter, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_9multidict_10_multidict_9_KeysIter_3__iter__, /*tp_iter*/ + __pyx_pw_9multidict_10_multidict_9_KeysIter_5__next__, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__KeysIter, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__KeysIter, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX 
>= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_9multidict_10_multidict__KeysView(PyTypeObject *t, PyObject *a, PyObject *k) { + PyObject *o = __pyx_tp_new_9multidict_10_multidict__ViewBaseSet(t, a, k); + if (unlikely(!o)) return 0; + return o; +} + +static PyMethodDef __pyx_methods_9multidict_10_multidict__KeysView[] = { + {"isdisjoint", (PyCFunction)__pyx_pw_9multidict_10_multidict_9_KeysView_1isdisjoint, METH_O, __pyx_doc_9multidict_10_multidict_9_KeysView_isdisjoint}, + {0, 0, 0, 0} +}; + +static PySequenceMethods __pyx_tp_as_sequence__KeysView = { + #if CYTHON_COMPILING_IN_PYPY + __pyx_pw_9multidict_10_multidict_9_ViewBase_3__len__, /*sq_length*/ + #else + 0, /*sq_length*/ + #endif + 0, /*sq_concat*/ + 0, /*sq_repeat*/ + 0, /*sq_item*/ + 0, /*sq_slice*/ + 0, /*sq_ass_item*/ + 0, /*sq_ass_slice*/ + __pyx_pw_9multidict_10_multidict_9_KeysView_3__contains__, /*sq_contains*/ + 0, /*sq_inplace_concat*/ + 0, /*sq_inplace_repeat*/ +}; + +static PyTypeObject __pyx_type_9multidict_10_multidict__KeysView = { + PyVarObject_HEAD_INIT(0, 0) + "multidict._multidict._KeysView", /*tp_name*/ + sizeof(struct __pyx_obj_9multidict_10_multidict__KeysView), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9multidict_10_multidict__ViewBase, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_9multidict_10_multidict_9_KeysView_7__repr__, /*tp_repr*/ + 0, /*tp_as_number*/ + &__pyx_tp_as_sequence__KeysView, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9multidict_10_multidict__ViewBase, /*tp_traverse*/ + __pyx_tp_clear_9multidict_10_multidict__ViewBase, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + __pyx_pw_9multidict_10_multidict_9_KeysView_5__iter__, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9multidict_10_multidict__KeysView, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9multidict_10_multidict__KeysView, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef __pyx_moduledef = { + #if PY_VERSION_HEX < 0x03020000 + { PyObject_HEAD_INIT(NULL) NULL, 0, NULL }, + #else + PyModuleDef_HEAD_INIT, + #endif + "_multidict", + 0, /* m_doc */ + -1, /* m_size */ + __pyx_methods /* m_methods */, + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_n_s_CIMultiDict, __pyx_k_CIMultiDict, sizeof(__pyx_k_CIMultiDict), 0, 0, 1, 1}, + {&__pyx_kp_s_C_projects_multidict_multidict, __pyx_k_C_projects_multidict_multidict, sizeof(__pyx_k_C_projects_multidict_multidict), 0, 0, 1, 0}, + {&__pyx_kp_s_Dictionary_changed_during_iterat, 
__pyx_k_Dictionary_changed_during_iterat, sizeof(__pyx_k_Dictionary_changed_during_iterat), 0, 0, 1, 0}, + {&__pyx_n_s_ItemsView, __pyx_k_ItemsView, sizeof(__pyx_k_ItemsView), 0, 0, 1, 1}, + {&__pyx_n_s_Iterable, __pyx_k_Iterable, sizeof(__pyx_k_Iterable), 0, 0, 1, 1}, + {&__pyx_n_s_KeyError, __pyx_k_KeyError, sizeof(__pyx_k_KeyError), 0, 0, 1, 1}, + {&__pyx_kp_s_Key_not_found_r, __pyx_k_Key_not_found_r, sizeof(__pyx_k_Key_not_found_r), 0, 0, 1, 0}, + {&__pyx_n_s_KeysView, __pyx_k_KeysView, sizeof(__pyx_k_KeysView), 0, 0, 1, 1}, + {&__pyx_n_s_Mapping, __pyx_k_Mapping, sizeof(__pyx_k_Mapping), 0, 0, 1, 1}, + {&__pyx_n_s_MultiDict, __pyx_k_MultiDict, sizeof(__pyx_k_MultiDict), 0, 0, 1, 1}, + {&__pyx_kp_s_MultiDict_keys_should_be_either, __pyx_k_MultiDict_keys_should_be_either, sizeof(__pyx_k_MultiDict_keys_should_be_either), 0, 0, 1, 0}, + {&__pyx_n_s_MutableMapping, __pyx_k_MutableMapping, sizeof(__pyx_k_MutableMapping), 0, 0, 1, 1}, + {&__pyx_n_s_NotImplemented, __pyx_k_NotImplemented, sizeof(__pyx_k_NotImplemented), 0, 0, 1, 1}, + {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s_Set, __pyx_k_Set, sizeof(__pyx_k_Set), 0, 0, 1, 1}, + {&__pyx_n_s_StopIteration, __pyx_k_StopIteration, sizeof(__pyx_k_StopIteration), 0, 0, 1, 1}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_ValuesView, __pyx_k_ValuesView, sizeof(__pyx_k_ValuesView), 0, 0, 1, 1}, + {&__pyx_kp_s__10, __pyx_k__10, sizeof(__pyx_k__10), 0, 0, 1, 0}, + {&__pyx_kp_s__3, __pyx_k__3, sizeof(__pyx_k__3), 0, 0, 1, 0}, + {&__pyx_kp_s__4, __pyx_k__4, sizeof(__pyx_k__4), 0, 0, 1, 0}, + {&__pyx_n_s_abc, __pyx_k_abc, sizeof(__pyx_k_abc), 0, 0, 1, 1}, + {&__pyx_n_s_arg, __pyx_k_arg, sizeof(__pyx_k_arg), 0, 0, 1, 1}, + {&__pyx_n_s_base_class, __pyx_k_base_class, sizeof(__pyx_k_base_class), 0, 0, 1, 1}, + {&__pyx_kp_s_can_t_pickle_objects, __pyx_k_can_t_pickle_objects, sizeof(__pyx_k_can_t_pickle_objects), 0, 0, 1, 0}, + {&__pyx_n_s_class, __pyx_k_class, sizeof(__pyx_k_class), 0, 0, 1, 1}, + {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, + {&__pyx_n_s_collections, __pyx_k_collections, sizeof(__pyx_k_collections), 0, 0, 1, 1}, + {&__pyx_n_s_collections_abc, __pyx_k_collections_abc, sizeof(__pyx_k_collections_abc), 0, 0, 1, 1}, + {&__pyx_kp_s_ctor_requires_instance_not, __pyx_k_ctor_requires_instance_not, sizeof(__pyx_k_ctor_requires_instance_not), 0, 0, 1, 0}, + {&__pyx_n_s_default, __pyx_k_default, sizeof(__pyx_k_default), 0, 0, 1, 1}, + {&__pyx_kp_s_empty_multidict, __pyx_k_empty_multidict, sizeof(__pyx_k_empty_multidict), 0, 0, 1, 0}, + {&__pyx_n_s_extend, __pyx_k_extend, sizeof(__pyx_k_extend), 0, 0, 1, 1}, + {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, + {&__pyx_n_s_getversion, __pyx_k_getversion, sizeof(__pyx_k_getversion), 0, 0, 1, 1}, + {&__pyx_n_s_identity, __pyx_k_identity, sizeof(__pyx_k_identity), 0, 0, 1, 1}, + {&__pyx_n_s_impl, __pyx_k_impl, sizeof(__pyx_k_impl), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_istr, __pyx_k_istr, sizeof(__pyx_k_istr), 0, 0, 1, 1}, + {&__pyx_n_s_istr_2, __pyx_k_istr_2, sizeof(__pyx_k_istr_2), 0, 0, 1, 1}, + {&__pyx_n_s_items, __pyx_k_items, sizeof(__pyx_k_items), 0, 0, 1, 1}, + {&__pyx_n_s_join, __pyx_k_join, sizeof(__pyx_k_join), 0, 0, 1, 1}, + {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, + {&__pyx_n_s_keys, __pyx_k_keys, sizeof(__pyx_k_keys), 0, 0, 1, 1}, + 
{&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_md, __pyx_k_md, sizeof(__pyx_k_md), 0, 0, 1, 1}, + {&__pyx_n_s_multidict__multidict, __pyx_k_multidict__multidict, sizeof(__pyx_k_multidict__multidict), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_object, __pyx_k_object, sizeof(__pyx_k_object), 0, 0, 1, 1}, + {&__pyx_kp_s_or, __pyx_k_or, sizeof(__pyx_k_or), 0, 0, 1, 0}, + {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, + {&__pyx_n_s_popone, __pyx_k_popone, sizeof(__pyx_k_popone), 0, 0, 1, 1}, + {&__pyx_n_s_proxy_classes, __pyx_k_proxy_classes, sizeof(__pyx_k_proxy_classes), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_kp_s_r, __pyx_k_r, sizeof(__pyx_k_r), 0, 0, 1, 0}, + {&__pyx_kp_s_r_2, __pyx_k_r_2, sizeof(__pyx_k_r_2), 0, 0, 1, 0}, + {&__pyx_kp_s_r_r, __pyx_k_r_r, sizeof(__pyx_k_r_r), 0, 0, 1, 0}, + {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, + {&__pyx_n_s_register, __pyx_k_register, sizeof(__pyx_k_register), 0, 0, 1, 1}, + {&__pyx_n_s_sys, __pyx_k_sys, sizeof(__pyx_k_sys), 0, 0, 1, 1}, + {&__pyx_kp_s_takes_at_most_1_positional_argu, __pyx_k_takes_at_most_1_positional_argu, sizeof(__pyx_k_takes_at_most_1_positional_argu), 0, 0, 1, 0}, + {&__pyx_kp_s_takes_either_dict_or_list_of_ke, __pyx_k_takes_either_dict_or_list_of_ke, sizeof(__pyx_k_takes_either_dict_or_list_of_ke), 0, 0, 1, 0}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_title, __pyx_k_title, sizeof(__pyx_k_title), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_n_s_upstr, __pyx_k_upstr, sizeof(__pyx_k_upstr), 0, 0, 1, 1}, + {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_object = __Pyx_GetBuiltinName(__pyx_n_s_object); if (!__pyx_builtin_object) __PYX_ERR(0, 11, __pyx_L1_error) + __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 36, __pyx_L1_error) + __pyx_builtin_NotImplemented = __Pyx_GetBuiltinName(__pyx_n_s_NotImplemented); if (!__pyx_builtin_NotImplemented) __PYX_ERR(0, 51, __pyx_L1_error) + __pyx_builtin_KeyError = __Pyx_GetBuiltinName(__pyx_n_s_KeyError); if (!__pyx_builtin_KeyError) __PYX_ERR(0, 117, __pyx_L1_error) + __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 224, __pyx_L1_error) + __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(0, 654, __pyx_L1_error) + __pyx_builtin_StopIteration = __Pyx_GetBuiltinName(__pyx_n_s_StopIteration); if (!__pyx_builtin_StopIteration) __PYX_ERR(0, 656, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "multidict/_multidict.pyx":268 + * return str(key) + * else: + * raise TypeError("MultiDict keys should be either str " # <<<<<<<<<<<<<< + * "or subclasses of str") + * + */ + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_s_MultiDict_keys_should_be_either); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 268, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + + /* "multidict/_multidict.pyx":515 + * return (item._key, item._value) + * else: + * raise 
KeyError("empty multidict") # <<<<<<<<<<<<<< + * + * def update(self, *args, **kwargs): + */ + __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_s_empty_multidict); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 515, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + + /* "multidict/_multidict.pyx":654 + * def __next__(self): + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") # <<<<<<<<<<<<<< + * if self._current == self._len: + * raise StopIteration + */ + __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_s_Dictionary_changed_during_iterat); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(0, 654, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + + /* "multidict/_multidict.pyx":720 + * def __next__(self): + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") # <<<<<<<<<<<<<< + * if self._current == self._len: + * raise StopIteration + */ + __pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_Dictionary_changed_during_iterat); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 720, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); + + /* "multidict/_multidict.pyx":771 + * def __next__(self): + * if self._version != self._impl._version: + * raise RuntimeError("Dictionary changed during iteration") # <<<<<<<<<<<<<< + * if self._current == self._len: + * raise StopIteration + */ + __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_s_Dictionary_changed_during_iterat); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 771, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + + /* "multidict/_multidict.pyx":17 + * + * + * def getversion(_Base md): # <<<<<<<<<<<<<< + * return md._impl._version + * + */ + __pyx_tuple__13 = PyTuple_Pack(1, __pyx_n_s_md); if (unlikely(!__pyx_tuple__13)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__13); + __Pyx_GIVEREF(__pyx_tuple__13); + __pyx_codeobj__14 = (PyObject*)__Pyx_PyCode_New(1, 0, 1, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__13, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_C_projects_multidict_multidict, __pyx_n_s_getversion, 17, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__14)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + __pyx_umethod_PyDict_Type_items.type = (PyObject*)&PyDict_Type; + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_3 = PyInt_FromLong(3); if (unlikely(!__pyx_int_3)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_4 = PyInt_FromLong(4); if (unlikely(!__pyx_int_4)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_5 = PyInt_FromLong(5); if (unlikely(!__pyx_int_5)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +#if PY_MAJOR_VERSION < 3 +PyMODINIT_FUNC init_multidict(void); /*proto*/ +PyMODINIT_FUNC init_multidict(void) +#else +PyMODINIT_FUNC PyInit__multidict(void); /*proto*/ +PyMODINIT_FUNC PyInit__multidict(void) +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + 
PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_REFNANNY + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); + if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); + } + #endif + __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit__multidict(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("_multidict", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_multidict___multidict) { + if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "multidict._multidict")) { + if (unlikely(PyDict_SetItemString(modules, "multidict._multidict", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global init code ---*/ + __pyx_v_9multidict_10_multidict__marker = Py_None; Py_INCREF(Py_None); + __pyx_v_9multidict_10_multidict__istr = Py_None; Py_INCREF(Py_None); + /*--- Variable export code ---*/ + /*--- Function export code ---*/ + /*--- Type init code ---*/ + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__Pair) < 0) __PYX_ERR(0, 54, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__Pair.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_Pair", (PyObject *)&__pyx_type_9multidict_10_multidict__Pair) < 0) __PYX_ERR(0, 54, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__Pair = &__pyx_type_9multidict_10_multidict__Pair; + __pyx_vtabptr_9multidict_10_multidict__Impl = &__pyx_vtable_9multidict_10_multidict__Impl; + __pyx_vtable_9multidict_10_multidict__Impl.incr_version = (void (*)(struct __pyx_obj_9multidict_10_multidict__Impl *))__pyx_f_9multidict_10_multidict_5_Impl_incr_version; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__Impl) < 0) __PYX_ERR(0, 70, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__Impl.tp_print = 0; + if (__Pyx_SetVtable(__pyx_type_9multidict_10_multidict__Impl.tp_dict, __pyx_vtabptr_9multidict_10_multidict__Impl) < 0) __PYX_ERR(0, 70, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "_Impl", (PyObject *)&__pyx_type_9multidict_10_multidict__Impl) < 0) __PYX_ERR(0, 70, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__Impl = &__pyx_type_9multidict_10_multidict__Impl; + __pyx_vtabptr_9multidict_10_multidict__Base = &__pyx_vtable_9multidict_10_multidict__Base; + __pyx_vtable_9multidict_10_multidict__Base._title = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *))__pyx_f_9multidict_10_multidict_5_Base__title; + __pyx_vtable_9multidict_10_multidict__Base._getall = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *, PyObject *, PyObject *))__pyx_f_9multidict_10_multidict_5_Base__getall; + __pyx_vtable_9multidict_10_multidict__Base._getone = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *, PyObject *, PyObject *))__pyx_f_9multidict_10_multidict_5_Base__getone; + __pyx_vtable_9multidict_10_multidict__Base._contains = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *))__pyx_f_9multidict_10_multidict_5_Base__contains; + __pyx_vtable_9multidict_10_multidict__Base.keys = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict__Base *, int __pyx_skip_dispatch))__pyx_f_9multidict_10_multidict_5_Base_keys; + __pyx_vtable_9multidict_10_multidict__Base._eq_to_mapping 
= (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *))__pyx_f_9multidict_10_multidict_5_Base__eq_to_mapping; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__Base) < 0) __PYX_ERR(0, 84, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__Base.tp_print = 0; + if (__Pyx_SetVtable(__pyx_type_9multidict_10_multidict__Base.tp_dict, __pyx_vtabptr_9multidict_10_multidict__Base) < 0) __PYX_ERR(0, 84, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "_Base", (PyObject *)&__pyx_type_9multidict_10_multidict__Base) < 0) __PYX_ERR(0, 84, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__Base = &__pyx_type_9multidict_10_multidict__Base; + __pyx_vtabptr_9multidict_10_multidict_MultiDictProxy = &__pyx_vtable_9multidict_10_multidict_MultiDictProxy; + __pyx_vtable_9multidict_10_multidict_MultiDictProxy.__pyx_base = *__pyx_vtabptr_9multidict_10_multidict__Base; + __pyx_type_9multidict_10_multidict_MultiDictProxy.tp_base = __pyx_ptype_9multidict_10_multidict__Base; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict_MultiDictProxy) < 0) __PYX_ERR(0, 217, __pyx_L1_error) + __pyx_type_9multidict_10_multidict_MultiDictProxy.tp_print = 0; + if (__Pyx_SetVtable(__pyx_type_9multidict_10_multidict_MultiDictProxy.tp_dict, __pyx_vtabptr_9multidict_10_multidict_MultiDictProxy) < 0) __PYX_ERR(0, 217, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "MultiDictProxy", (PyObject *)&__pyx_type_9multidict_10_multidict_MultiDictProxy) < 0) __PYX_ERR(0, 217, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict_MultiDictProxy = &__pyx_type_9multidict_10_multidict_MultiDictProxy; + __pyx_vtabptr_9multidict_10_multidict_CIMultiDictProxy = &__pyx_vtable_9multidict_10_multidict_CIMultiDictProxy; + __pyx_vtable_9multidict_10_multidict_CIMultiDictProxy.__pyx_base = *__pyx_vtabptr_9multidict_10_multidict_MultiDictProxy; + __pyx_vtable_9multidict_10_multidict_CIMultiDictProxy.__pyx_base.__pyx_base._title = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *))__pyx_f_9multidict_10_multidict_16CIMultiDictProxy__title; + __pyx_type_9multidict_10_multidict_CIMultiDictProxy.tp_base = __pyx_ptype_9multidict_10_multidict_MultiDictProxy; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict_CIMultiDictProxy) < 0) __PYX_ERR(0, 243, __pyx_L1_error) + __pyx_type_9multidict_10_multidict_CIMultiDictProxy.tp_print = 0; + if (__Pyx_SetVtable(__pyx_type_9multidict_10_multidict_CIMultiDictProxy.tp_dict, __pyx_vtabptr_9multidict_10_multidict_CIMultiDictProxy) < 0) __PYX_ERR(0, 243, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "CIMultiDictProxy", (PyObject *)&__pyx_type_9multidict_10_multidict_CIMultiDictProxy) < 0) __PYX_ERR(0, 243, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict_CIMultiDictProxy = &__pyx_type_9multidict_10_multidict_CIMultiDictProxy; + __pyx_vtabptr_9multidict_10_multidict_MultiDict = &__pyx_vtable_9multidict_10_multidict_MultiDict; + __pyx_vtable_9multidict_10_multidict_MultiDict.__pyx_base = *__pyx_vtabptr_9multidict_10_multidict__Base; + __pyx_vtable_9multidict_10_multidict_MultiDict._extend = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict_MultiDict *, PyObject *, PyObject *, PyObject *, int))__pyx_f_9multidict_10_multidict_9MultiDict__extend; + __pyx_vtable_9multidict_10_multidict_MultiDict._add = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict_MultiDict *, PyObject *, PyObject *))__pyx_f_9multidict_10_multidict_9MultiDict__add; + __pyx_vtable_9multidict_10_multidict_MultiDict._replace = (PyObject *(*)(struct 
__pyx_obj_9multidict_10_multidict_MultiDict *, PyObject *, PyObject *))__pyx_f_9multidict_10_multidict_9MultiDict__replace; + __pyx_vtable_9multidict_10_multidict_MultiDict._remove = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict_MultiDict *, PyObject *))__pyx_f_9multidict_10_multidict_9MultiDict__remove; + __pyx_type_9multidict_10_multidict_MultiDict.tp_base = __pyx_ptype_9multidict_10_multidict__Base; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict_MultiDict) < 0) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_type_9multidict_10_multidict_MultiDict.tp_print = 0; + if (__Pyx_SetVtable(__pyx_type_9multidict_10_multidict_MultiDict.tp_dict, __pyx_vtabptr_9multidict_10_multidict_MultiDict) < 0) __PYX_ERR(0, 272, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "MultiDict", (PyObject *)&__pyx_type_9multidict_10_multidict_MultiDict) < 0) __PYX_ERR(0, 272, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict_MultiDict = &__pyx_type_9multidict_10_multidict_MultiDict; + __pyx_vtabptr_9multidict_10_multidict_CIMultiDict = &__pyx_vtable_9multidict_10_multidict_CIMultiDict; + __pyx_vtable_9multidict_10_multidict_CIMultiDict.__pyx_base = *__pyx_vtabptr_9multidict_10_multidict_MultiDict; + __pyx_vtable_9multidict_10_multidict_CIMultiDict.__pyx_base.__pyx_base._title = (PyObject *(*)(struct __pyx_obj_9multidict_10_multidict__Base *, PyObject *))__pyx_f_9multidict_10_multidict_11CIMultiDict__title; + __pyx_type_9multidict_10_multidict_CIMultiDict.tp_base = __pyx_ptype_9multidict_10_multidict_MultiDict; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict_CIMultiDict) < 0) __PYX_ERR(0, 525, __pyx_L1_error) + __pyx_type_9multidict_10_multidict_CIMultiDict.tp_print = 0; + if (__Pyx_SetVtable(__pyx_type_9multidict_10_multidict_CIMultiDict.tp_dict, __pyx_vtabptr_9multidict_10_multidict_CIMultiDict) < 0) __PYX_ERR(0, 525, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "CIMultiDict", (PyObject *)&__pyx_type_9multidict_10_multidict_CIMultiDict) < 0) __PYX_ERR(0, 525, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict_CIMultiDict = &__pyx_type_9multidict_10_multidict_CIMultiDict; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__ViewBase) < 0) __PYX_ERR(0, 545, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__ViewBase.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_ViewBase", (PyObject *)&__pyx_type_9multidict_10_multidict__ViewBase) < 0) __PYX_ERR(0, 545, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__ViewBase = &__pyx_type_9multidict_10_multidict__ViewBase; + __pyx_type_9multidict_10_multidict__ViewBaseSet.tp_base = __pyx_ptype_9multidict_10_multidict__ViewBase; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__ViewBaseSet) < 0) __PYX_ERR(0, 556, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__ViewBaseSet.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_ViewBaseSet", (PyObject *)&__pyx_type_9multidict_10_multidict__ViewBaseSet) < 0) __PYX_ERR(0, 556, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__ViewBaseSet = &__pyx_type_9multidict_10_multidict__ViewBaseSet; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__ItemsIter) < 0) __PYX_ERR(0, 637, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__ItemsIter.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_ItemsIter", (PyObject *)&__pyx_type_9multidict_10_multidict__ItemsIter) < 0) __PYX_ERR(0, 637, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__ItemsIter = &__pyx_type_9multidict_10_multidict__ItemsIter; + __pyx_type_9multidict_10_multidict__ItemsView.tp_base = 
__pyx_ptype_9multidict_10_multidict__ViewBaseSet; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__ItemsView) < 0) __PYX_ERR(0, 662, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__ItemsView.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_ItemsView", (PyObject *)&__pyx_type_9multidict_10_multidict__ItemsView) < 0) __PYX_ERR(0, 662, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__ItemsView = &__pyx_type_9multidict_10_multidict__ItemsView; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__ValuesIter) < 0) __PYX_ERR(0, 703, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__ValuesIter.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_ValuesIter", (PyObject *)&__pyx_type_9multidict_10_multidict__ValuesIter) < 0) __PYX_ERR(0, 703, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__ValuesIter = &__pyx_type_9multidict_10_multidict__ValuesIter; + __pyx_type_9multidict_10_multidict__ValuesView.tp_base = __pyx_ptype_9multidict_10_multidict__ViewBase; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__ValuesView) < 0) __PYX_ERR(0, 728, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__ValuesView.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_ValuesView", (PyObject *)&__pyx_type_9multidict_10_multidict__ValuesView) < 0) __PYX_ERR(0, 728, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__ValuesView = &__pyx_type_9multidict_10_multidict__ValuesView; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__KeysIter) < 0) __PYX_ERR(0, 754, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__KeysIter.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_KeysIter", (PyObject *)&__pyx_type_9multidict_10_multidict__KeysIter) < 0) __PYX_ERR(0, 754, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__KeysIter = &__pyx_type_9multidict_10_multidict__KeysIter; + __pyx_type_9multidict_10_multidict__KeysView.tp_base = __pyx_ptype_9multidict_10_multidict__ViewBaseSet; + if (PyType_Ready(&__pyx_type_9multidict_10_multidict__KeysView) < 0) __PYX_ERR(0, 779, __pyx_L1_error) + __pyx_type_9multidict_10_multidict__KeysView.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "_KeysView", (PyObject *)&__pyx_type_9multidict_10_multidict__KeysView) < 0) __PYX_ERR(0, 779, __pyx_L1_error) + __pyx_ptype_9multidict_10_multidict__KeysView = &__pyx_type_9multidict_10_multidict__KeysView; + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if CYTHON_COMPILING_IN_PYPY + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) __PYX_ERR(1, 9, __pyx_L1_error) + /*--- Variable import code ---*/ + /*--- Function import code ---*/ + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "multidict/_multidict.pyx":3 + * from __future__ import absolute_import + * + * import sys # <<<<<<<<<<<<<< + * from collections import abc + * from collections.abc import Iterable, Set + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_sys, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_sys, __pyx_t_1) < 0) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":4 + * + * import sys + * from collections import abc # <<<<<<<<<<<<<< + * from collections.abc import Iterable, Set + * + */ + __pyx_t_1 = PyList_New(1); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_abc); + __Pyx_GIVEREF(__pyx_n_s_abc); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_abc); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_collections, __pyx_t_1, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_abc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_abc, __pyx_t_1) < 0) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":5 + * import sys + * from collections import abc + * from collections.abc import Iterable, Set # <<<<<<<<<<<<<< + * + * from cpython.object cimport PyObject_Str + */ + __pyx_t_2 = PyList_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_Iterable); + __Pyx_GIVEREF(__pyx_n_s_Iterable); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_Iterable); + __Pyx_INCREF(__pyx_n_s_Set); + __Pyx_GIVEREF(__pyx_n_s_Set); + PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_Set); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_collections_abc, __pyx_t_2, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_Iterable); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Iterable, __pyx_t_2) < 0) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_Set); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_Set, __pyx_t_2) < 0) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "multidict/_multidict.pyx":9 + * from cpython.object cimport PyObject_Str + * + * from ._istr import istr # <<<<<<<<<<<<<< + * + * cdef object _marker = object() + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_istr_2); + __Pyx_GIVEREF(__pyx_n_s_istr_2); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_istr_2); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_istr, __pyx_t_1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_istr_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_istr_2, __pyx_t_1) < 0) __PYX_ERR(0, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":11 + * from ._istr import istr + * + * cdef object _marker = object() # <<<<<<<<<<<<<< + * + * upstr = istr # for relaxing backward compatibility problems + */ + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_object); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_v_9multidict_10_multidict__marker); + __Pyx_DECREF_SET(__pyx_v_9multidict_10_multidict__marker, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":13 + * cdef object 
_marker = object() + * + * upstr = istr # for relaxing backward compatibility problems # <<<<<<<<<<<<<< + * cdef object _istr = istr + * + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_istr_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_upstr, __pyx_t_2) < 0) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":14 + * + * upstr = istr # for relaxing backward compatibility problems + * cdef object _istr = istr # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_istr_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_v_9multidict_10_multidict__istr); + __Pyx_DECREF_SET(__pyx_v_9multidict_10_multidict__istr, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":17 + * + * + * def getversion(_Base md): # <<<<<<<<<<<<<< + * return md._impl._version + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_9multidict_10_multidict_1getversion, NULL, __pyx_n_s_multidict__multidict); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_getversion, __pyx_t_2) < 0) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":97 + * return str(s) + * + * def getall(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Return a list of all values matching the key.""" + * return self._getall(self._title(key), key, default) + */ + __Pyx_INCREF(__pyx_v_9multidict_10_multidict__marker); + __pyx_k_ = __pyx_v_9multidict_10_multidict__marker; + __Pyx_GIVEREF(__pyx_v_9multidict_10_multidict__marker); + + /* "multidict/_multidict.pyx":119 + * raise KeyError('Key not found: %r' % key) + * + * def getone(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Get first value matching the key.""" + * return self._getone(self._title(key), key, default) + */ + __Pyx_INCREF(__pyx_v_9multidict_10_multidict__marker); + __pyx_k__2 = __pyx_v_9multidict_10_multidict__marker; + __Pyx_GIVEREF(__pyx_v_9multidict_10_multidict__marker); + + /* "multidict/_multidict.pyx":218 + * + * cdef class MultiDictProxy(_Base): + * _proxy_classes = (MultiDict, MultiDictProxy) # <<<<<<<<<<<<<< + * _base_class = MultiDict + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 218, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)); + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)); + PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)); + if (PyDict_SetItem((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy->tp_dict, __pyx_n_s_proxy_classes, __pyx_t_2) < 0) __PYX_ERR(0, 218, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_9multidict_10_multidict_MultiDictProxy); + + /* "multidict/_multidict.pyx":219 + * cdef class MultiDictProxy(_Base): + * _proxy_classes = (MultiDict, MultiDictProxy) + * _base_class = MultiDict # <<<<<<<<<<<<<< + * + * def __init__(self, arg): + */ + if (PyDict_SetItem((PyObject 
*)__pyx_ptype_9multidict_10_multidict_MultiDictProxy->tp_dict, __pyx_n_s_base_class, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)) < 0) __PYX_ERR(0, 219, __pyx_L1_error) + PyType_Modified(__pyx_ptype_9multidict_10_multidict_MultiDictProxy); + + /* "multidict/_multidict.pyx":240 + * return self._base_class(self) + * + * abc.Mapping.register(MultiDictProxy) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_register); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_3) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDictProxy)); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":244 + * + * cdef class CIMultiDictProxy(MultiDictProxy): + * _proxy_classes = (CIMultiDict, CIMultiDictProxy) # <<<<<<<<<<<<<< + * _base_class = CIMultiDict + * + */ + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 244, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)); + 
PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)); + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)); + PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)); + if (PyDict_SetItem((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy->tp_dict, __pyx_n_s_proxy_classes, __pyx_t_2) < 0) __PYX_ERR(0, 244, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy); + + /* "multidict/_multidict.pyx":245 + * cdef class CIMultiDictProxy(MultiDictProxy): + * _proxy_classes = (CIMultiDict, CIMultiDictProxy) + * _base_class = CIMultiDict # <<<<<<<<<<<<<< + * + * cdef str _title(self, s): + */ + if (PyDict_SetItem((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy->tp_dict, __pyx_n_s_base_class, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)) < 0) __PYX_ERR(0, 245, __pyx_L1_error) + PyType_Modified(__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy); + + /* "multidict/_multidict.pyx":256 + * + * + * abc.Mapping.register(CIMultiDictProxy) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 256, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_Mapping); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 256, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_register); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 256, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_4) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 256, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 256, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 256, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_3 = PyTuple_New(1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 256, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)); + __Pyx_GIVEREF(((PyObject 
*)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)); + PyTuple_SET_ITEM(__pyx_t_3, 0+1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDictProxy)); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 256, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":445 + * return default + * + * def popone(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Remove the last occurrence of key and return the corresponding + * value. + */ + __Pyx_INCREF(__pyx_v_9multidict_10_multidict__marker); + __pyx_k__6 = __pyx_v_9multidict_10_multidict__marker; + __Pyx_GIVEREF(__pyx_v_9multidict_10_multidict__marker); + + /* "multidict/_multidict.pyx":472 + * return default + * + * pop = popone # <<<<<<<<<<<<<< + * + * def popall(self, key, default=_marker): + */ + __pyx_t_2 = __Pyx_GetNameInClass((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict, __pyx_n_s_popone); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 472, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict->tp_dict, __pyx_n_s_pop, __pyx_t_2) < 0) __PYX_ERR(0, 472, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + PyType_Modified(__pyx_ptype_9multidict_10_multidict_MultiDict); + + /* "multidict/_multidict.pyx":474 + * pop = popone + * + * def popall(self, key, default=_marker): # <<<<<<<<<<<<<< + * """Remove all occurrences of key and return the list of corresponding + * values. + */ + __Pyx_INCREF(__pyx_v_9multidict_10_multidict__marker); + __pyx_k__7 = __pyx_v_9multidict_10_multidict__marker; + __Pyx_GIVEREF(__pyx_v_9multidict_10_multidict__marker); + + /* "multidict/_multidict.pyx":522 + * + * + * abc.MutableMapping.register(MultiDict) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 522, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_MutableMapping); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 522, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_register); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 522, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_3) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 522, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 522, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject 
*)__pyx_ptype_9multidict_10_multidict_MultiDict)}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 522, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 522, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_MultiDict)); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 522, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":542 + * + * + * abc.MutableMapping.register(CIMultiDict) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 542, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_MutableMapping); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 542, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_register); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 542, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_4) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 542, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 542, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 542, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_3 = PyTuple_New(1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 542, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)); + PyTuple_SET_ITEM(__pyx_t_3, 0+1, ((PyObject *)__pyx_ptype_9multidict_10_multidict_CIMultiDict)); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, 
__pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 542, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":700 + * + * + * abc.ItemsView.register(_ItemsView) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_ItemsView); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_register); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_3) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_ptype_9multidict_10_multidict__ItemsView)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_ptype_9multidict_10_multidict__ItemsView)}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_ptype_9multidict_10_multidict__ItemsView)}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict__ItemsView)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict__ItemsView)); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, ((PyObject *)__pyx_ptype_9multidict_10_multidict__ItemsView)); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 700, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":751 + * + * + * abc.ValuesView.register(_ValuesView) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 751, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_ValuesView); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 751, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_register); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 751, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_4) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_ptype_9multidict_10_multidict__ValuesView)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 751, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, ((PyObject *)__pyx_ptype_9multidict_10_multidict__ValuesView)}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 751, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, ((PyObject *)__pyx_ptype_9multidict_10_multidict__ValuesView)}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 751, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_3 = PyTuple_New(1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 751, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict__ValuesView)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict__ValuesView)); + PyTuple_SET_ITEM(__pyx_t_3, 0+1, ((PyObject *)__pyx_ptype_9multidict_10_multidict__ValuesView)); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 751, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":811 + * + * + * abc.KeysView.register(_KeysView) # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_abc); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 811, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_KeysView); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 811, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_register); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 811, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_3) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_ptype_9multidict_10_multidict__KeysView)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 811, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject 
*)__pyx_ptype_9multidict_10_multidict__KeysView)}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 811, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, ((PyObject *)__pyx_ptype_9multidict_10_multidict__KeysView)}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 811, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 811, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_INCREF(((PyObject *)__pyx_ptype_9multidict_10_multidict__KeysView)); + __Pyx_GIVEREF(((PyObject *)__pyx_ptype_9multidict_10_multidict__KeysView)); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, ((PyObject *)__pyx_ptype_9multidict_10_multidict__KeysView)); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 811, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "multidict/_multidict.pyx":1 + * from __future__ import absolute_import # <<<<<<<<<<<<<< + * + * import sys + */ + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init multidict._multidict", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init multidict._multidict"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if PY_MAJOR_VERSION < 3 + return; + #else + return __pyx_m; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* ArgTypeTest */ +static void __Pyx_RaiseArgumentTypeInvalid(const char* name, PyObject *obj, PyTypeObject *type) { + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); +} +static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int 
none_allowed, + const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (none_allowed && obj == Py_None) return 1; + else if (exact) { + if (likely(Py_TYPE(obj) == type)) return 1; + #if PY_MAJOR_VERSION == 2 + else if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(PyObject_TypeCheck(obj, type))) return 1; + } + __Pyx_RaiseArgumentTypeInvalid(name, obj, type); + return 0; +} + +/* GetItemInt */ +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (wraparound & unlikely(i < 0)) i += PyList_GET_SIZE(o); + if ((!boundscheck) || likely((0 <= i) & (i < PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (wraparound & unlikely(i < 0)) i += PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely((0 <= i) & (i < PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely((n >= 0) & (n < PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* BytesEquals */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + 
return (equals == Py_EQ) ? (result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* GetModuleGlobalName */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS + result = PyDict_GetItem(__pyx_d, name); + if (likely(result)) { + Py_INCREF(result); + } else { +#else + result = PyObject_GetItem(__pyx_d, name); + if (!result) { + PyErr_Clear(); +#endif + result = __Pyx_GetBuiltinName(name); + } + return result; +} + +/* RaiseArgTupleInvalid */ + static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +/* RaiseDoubleKeywords */ + static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ + static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* KeywordStringCheck */ + static CYTHON_INLINE int __Pyx_CheckKeywordStrings( + PyObject *kwdict, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + while (PyDict_Next(kwdict, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if ((!kw_allowed) && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* PyObjectCall */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyErrFetchRestore */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { 
+ *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* RaiseException */ + #if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } +#if PY_VERSION_HEX >= 0x03030000 + if (cause) { +#else + if (cause && cause != Py_None) { +#endif + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + 
PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = PyThreadState_GET(); + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* PyCFunctionFastCall */ + #if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs, NULL); +} +#endif // CYTHON_FAST_PYCCALL + +/* PyFunctionFastCall */ + #if CYTHON_FAST_PYCALL +#include "frameobject.h" +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = PyThreadState_GET(); + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = f->f_localsplus; + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif // CPython < 3.6 +#endif // CYTHON_FAST_PYCALL + +/* PyObjectCallMethO */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ + #if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || PyObject_TypeCheck(func, __pyx_CyFunctionType))) { +#else + if (likely(PyCFunction_Check(func))) { +#endif + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, 
PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyObjectCallNoArg */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || PyObject_TypeCheck(func, __pyx_CyFunctionType))) { +#else + if (likely(PyCFunction_Check(func))) { +#endif + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); +} +#endif + +/* StringJoin */ + #if !CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values) { + return PyObject_CallMethodObjArgs(sep, __pyx_n_s_join, values, NULL); +} +#endif + +/* RaiseTooManyValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? "" : "s"); +} + +/* IterFinish */ + static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = PyThreadState_GET(); + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +/* UnpackItemEndCheck */ + static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/* PyIntBinop */ + #if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED int inplace) { + if (op1 == op2) { + Py_RETURN_TRUE; + } + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(op1))) { + const long b = intval; + long a = PyInt_AS_LONG(op1); + if (a == b) { + Py_RETURN_TRUE; + } else { + Py_RETURN_FALSE; + } + } + #endif + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(op1))) { + const long b = intval; + long a; + const digit* digits = ((PyLongObject*)op1)->ob_digit; + const Py_ssize_t size = Py_SIZE(op1); + if (likely(__Pyx_sst_abs(size) <= 1)) { + a = likely(size) ? 
digits[0] : 0; + if (size == -1) a = -a; + } else { + switch (size) { + case -2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case 2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case -3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case 3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case -4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case 4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + #if PyLong_SHIFT < 30 && PyLong_SHIFT != 15 + default: return PyLong_Type.tp_richcompare(op1, op2, Py_EQ); + #else + default: Py_RETURN_FALSE; + #endif + } + } + if (a == b) { + Py_RETURN_TRUE; + } else { + Py_RETURN_FALSE; + } + } + #endif + if (PyFloat_CheckExact(op1)) { + const long b = intval; + double a = PyFloat_AS_DOUBLE(op1); + if ((double)a == (double)b) { + Py_RETURN_TRUE; + } else { + Py_RETURN_FALSE; + } + } + return PyObject_RichCompare(op1, op2, Py_EQ); +} +#endif + +/* ExtTypeTest */ + static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(PyObject_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* UnpackUnboundCMethod */ + static int __Pyx_TryUnpackUnboundCMethod(__Pyx_CachedCFunction* target) { + PyObject *method; + method = __Pyx_PyObject_GetAttrStr(target->type, *target->method_name); + if (unlikely(!method)) + return -1; + target->method = method; +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION >= 3 + if (likely(PyObject_TypeCheck(method, &PyMethodDescr_Type))) + #endif + { + PyMethodDescrObject *descr = (PyMethodDescrObject*) method; + target->func = descr->d_method->ml_meth; + target->flag = descr->d_method->ml_flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST); + } +#endif + return 0; +} + +/* CallUnboundCMethod0 */ + static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self) { + PyObject *args, *result = NULL; + if (unlikely(!cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL; +#if CYTHON_ASSUME_SAFE_MACROS + args = PyTuple_New(1); + if (unlikely(!args)) goto bad; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); +#else + args = PyTuple_Pack(1, self); + if (unlikely(!args)) goto bad; +#endif + result = __Pyx_PyObject_Call(cfunc->method, args, NULL); + Py_DECREF(args); +bad: + return result; +} + +/* py_dict_items */ + static CYTHON_INLINE PyObject* __Pyx_PyDict_Items(PyObject* d) { + if (PY_MAJOR_VERSION >= 3) + return 
__Pyx_CallUnboundCMethod0(&__pyx_umethod_PyDict_Type_items, d); + else + return PyDict_Items(d); +} + +/* DelItemInt */ + static CYTHON_INLINE int __Pyx_DelItem_Generic(PyObject *o, PyObject *j) { + int r; + if (!j) return -1; + r = PyObject_DelItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE int __Pyx_DelItemInt_Fast(PyObject *o, Py_ssize_t i, + CYTHON_UNUSED int is_list, CYTHON_NCP_UNUSED int wraparound) { +#if !CYTHON_USE_TYPE_SLOTS + if (is_list || PySequence_Check(o)) { + return PySequence_DelItem(o, i); + } +#else + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_ass_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return -1; + PyErr_Clear(); + } + } + return m->sq_ass_item(o, i, (PyObject *)NULL); + } +#endif + return __Pyx_DelItem_Generic(o, PyInt_FromSsize_t(i)); +} + +/* PyObjectCallMethod1 */ + static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) { + PyObject *method, *result = NULL; + method = __Pyx_PyObject_GetAttrStr(obj, method_name); + if (unlikely(!method)) goto done; +#if CYTHON_UNPACK_METHODS + if (likely(PyMethod_Check(method))) { + PyObject *self = PyMethod_GET_SELF(method); + if (likely(self)) { + PyObject *args; + PyObject *function = PyMethod_GET_FUNCTION(method); + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {self, arg}; + result = __Pyx_PyFunction_FastCall(function, args, 2); + goto done; + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {self, arg}; + result = __Pyx_PyCFunction_FastCall(function, args, 2); + goto done; + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 1, arg); + Py_INCREF(function); + Py_DECREF(method); method = NULL; + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); + return result; + } + } +#endif + result = __Pyx_PyObject_CallOneArg(method, arg); +done: + Py_XDECREF(method); + return result; +} + +/* pop_index */ + static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix) { + PyObject *r; + if (unlikely(!py_ix)) return NULL; + r = __Pyx__PyObject_PopIndex(L, py_ix); + Py_DECREF(py_ix); + return r; +} +static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix) { + return __Pyx_PyObject_CallMethod1(L, __pyx_n_s_pop, py_ix); +} +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix) { + Py_ssize_t size = PyList_GET_SIZE(L); + if (likely(size > (((PyListObject*)L)->allocated >> 1))) { + Py_ssize_t cix = ix; + if (cix < 0) { + cix += size; + } + if (likely(0 <= cix && cix < size)) { + PyObject* v = PyList_GET_ITEM(L, cix); + Py_SIZE(L) -= 1; + size -= 1; + memmove(&PyList_GET_ITEM(L, cix), &PyList_GET_ITEM(L, cix+1), (size_t)(size-cix)*sizeof(PyObject*)); + return v; + } + } + if (py_ix == Py_None) { + return __Pyx__PyObject_PopNewIndex(L, PyInt_FromSsize_t(ix)); + } else { + return __Pyx__PyObject_PopIndex(L, py_ix); + } +} +#endif + +/* SetVTable */ + static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = 
PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* Import */ + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(1); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + #endif + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_VERSION_HEX < 0x03030000 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ + static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* GetNameInClass */ + static PyObject *__Pyx_GetNameInClass(PyObject *nmspace, PyObject *name) { + PyObject *result; + result = __Pyx_PyObject_GetAttrStr(nmspace, name); + if (!result) + result = __Pyx_GetModuleGlobalName(name); + return result; +} + +/* CodeObjectCache */ + static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || 
unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ + #include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + py_code = __pyx_find_code_object(c_line ? 
c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? c_line : py_line, py_code); + } + py_frame = PyFrame_New( + PyThreadState_GET(), /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_PY_LONG_LONG(unsigned PY_LONG_LONG value) { + const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG) -1, const_zero = (unsigned PY_LONG_LONG) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(unsigned PY_LONG_LONG) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(unsigned PY_LONG_LONG) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(unsigned PY_LONG_LONG) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(unsigned PY_LONG_LONG), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ + #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ + static 
CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= 
sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && 
!defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntFromPy */ + static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { 
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if 
(sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CheckBinaryVersion */ + static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* ModuleImport */ + #ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +/* TypeImport */ + #ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate 
binary incompatibility. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s has the wrong size, try recompiling. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +/* InitStrings */ + static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if CYTHON_COMPILING_IN_CPYTHON && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { +#if PY_VERSION_HEX < 0x03030000 + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +#else + if (__Pyx_PyUnicode_READY(o) == -1) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (PyUnicode_IS_ASCII(o)) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +#endif + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (PyInt_Check(x) || PyLong_Check(x)) +#else + if 
(PyLong_Check(x)) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = PyNumber_Long(x); + } + #else + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Long(x); + } + #endif +#else + res = PyNumber_Int(x); +#endif + if (res) { +#if PY_MAJOR_VERSION < 3 + if (!PyInt_Check(res) && !PyLong_Check(res)) { +#else + if (!PyLong_Check(res)) { +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + name, name, Py_TYPE(res)->tp_name); + Py_DECREF(res); + return NULL; + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(x); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/RBXLegacyDiscordBot/lib/multidict/_multidict.cp36-win32.pyd b/RBXLegacyDiscordBot/lib/multidict/_multidict.cp36-win32.pyd new file mode 100644 index 0000000000000000000000000000000000000000..e0e3532bba078aeec7cfb0fbadd4818acb11575a GIT binary patch literal 117248 zcmeFa3w%`7wLd;dCdmL3X3(TY4H_^g3R-lC1p^o!0aP9dj|3_&6>yAbQD!tMAa*j9 z@fb^ewAN}%q*iXP?WMO?L$o#!6sQO&qG)VOf6{uU12&>URNC|Ze)m4FnKKih*Y@}8 z=YQdoGv~3_+H0@9*4k^Y{hIvsHLe_&%jL$Orny`#_~pM!aqa%%&vLnXpWWKq^=yyV z`n6u@dg)EySh(n- zf`Xo9I_jH-z1MvH?EgM&{FV0WcX$wtlloycog_s^6lxT;@6MA1$f6h-e{bqX}g71~KXVN(sq-6ScQD{*^ z2p{K+lO&3=>U0}0r6}!1w=TRUbdAe3$z-mUvG}kG-$&v1 
[... base85-encoded binary patch data for _multidict.cp36-win32.pyd omitted ...]
zqp1``UPmL2ecG}@D8O7zvz?XNJ4E;(nUF^)unON52hAOYw&D=e=!?~)nOLPbA{-T} z1rB~vvZ#pZ@Q_Nxnc@O*y?97G3h#9w_;!pn*P3T7K*VYqV%-Z6wOWkz&lQlTYp_bO z9x^onYe?ycVT{H)e}Qc#C}k=1*&5qAto3h&1g(NMbqsdLSz8PuSRJvl$=gNvQhmYS zW3Z-Kf@tmn`*QmV`#P*>ZpDgb6{5Mvu%3Aq^b!j%*Mu0lC`q82fzlY*_$64ESOEFE z0+h2J+`kQ8??LI1R1G_W+*I0iyHj)V2$cT96EL=<=>^yCJ|Mr0W5 z#>(Xp*ovnS0gFaFq`fm45<_vOVf8Z8nd{7RPQ&`;V&ovKLTqFsC~r5U#u2PvoQD4# zjp#^wcylR`9%=9tGC_ZNasgH{m&+?spMwjFqrw)*yQz z+8yssbf>s^tVxb`kAdtcfX5aOFFDV%4nFb*=*wfClSEG&u&)|(mk3M6&rjnQ^BWO| zKMR{t!YX|l=*SNm+G^g7nATDA8bpM7Pz$XTt`yb?dm*Qfz%q%&>}H6|u_m|%@_D^@ z5EeSkBF*6jtTgSm9>nTJ3?h(okSnwpF$nU=*4frW57I2|vhBu99)xFn1an!9+zpz~ zbBIvUoF>4c>2J_M$-f_BPCgx%self)._impl._items + rgt_items = (<_Base>other)._impl._items + l = len(lft_items) + if l != len(rgt_items): + return False + for i in range(l): + lft = <_Pair>(lft_items[i]) + rgt = <_Pair>(rgt_items[i]) + if lft._hash != rgt._hash: + return False + if lft._identity != rgt._identity: + return False + if lft._value != rgt._value: + return False + return True + elif is_left_base and isinstance(other, abc.Mapping): + return (<_Base>self)._eq_to_mapping(other) + elif is_right_base and isinstance(self, abc.Mapping): + return (<_Base>other)._eq_to_mapping(self) + else: + return NotImplemented + + +cdef class _Pair: + cdef str _identity + cdef Py_hash_t _hash + cdef str _key + cdef object _value + + def __cinit__(self, identity, key, value): + self._hash = hash(identity) + self._identity = identity + self._key = key + self._value = value + + +cdef unsigned long long _version + + +cdef class _Impl: + cdef list _items + cdef unsigned long long _version + + def __cinit__(self): + self._items = [] + self.incr_version() + + cdef void incr_version(self): + global _version + _version += 1 + self._version = _version + + +cdef class _Base: + + cdef _Impl _impl + + cdef str _title(self, s): + typ = type(s) + if typ is str: + return s + elif typ is _istr: + return PyObject_Str(s) + else: + return str(s) + + def getall(self, key, default=_marker): + """Return a list of all values matching the key.""" + return self._getall(self._title(key), key, default) + + cdef _getall(self, str identity, key, default): + cdef list res + cdef _Pair item + cdef Py_hash_t h = hash(identity) + res = [] + for i in self._impl._items: + item = <_Pair>i + if item._hash != h: + continue + if item._identity == identity: + res.append(item._value) + if res: + return res + elif default is not _marker: + return default + else: + raise KeyError('Key not found: %r' % key) + + def getone(self, key, default=_marker): + """Get first value matching the key.""" + return self._getone(self._title(key), key, default) + + cdef _getone(self, str identity, key, default): + cdef _Pair item + cdef Py_hash_t h = hash(identity) + for i in self._impl._items: + item = <_Pair>i + if item._hash != h: + continue + if item._identity == identity: + return item._value + if default is not _marker: + return default + raise KeyError('Key not found: %r' % key) + + # Mapping interface # + + def __getitem__(self, key): + return self._getone(self._title(key), key, _marker) + + def get(self, key, default=None): + """Get first value matching the key. + + The method is alias for .getone(). 
+ """ + return self._getone(self._title(key), key, default) + + def __contains__(self, key): + return self._contains(self._title(key)) + + cdef _contains(self, str identity): + cdef _Pair item + cdef Py_hash_t h = hash(identity) + for i in self._impl._items: + item = <_Pair>i + if item._hash != h: + continue + if item._identity == identity: + return True + return False + + def __iter__(self): + return iter(self.keys()) + + def __len__(self): + return len(self._impl._items) + + cpdef keys(self): + """Return a new view of the dictionary's keys.""" + return _KeysView.__new__(_KeysView, self._impl) + + def items(self): + """Return a new view of the dictionary's items *(key, value) pairs).""" + return _ItemsView.__new__(_ItemsView, self._impl) + + def values(self): + """Return a new view of the dictionary's values.""" + return _ValuesView.__new__(_ValuesView, self._impl) + + def __repr__(self): + cdef _Pair item + lst = [] + for i in self._impl._items: + item = <_Pair>i + lst.append("'{}': {!r}".format(item._key, item._value)) + body = ', '.join(lst) + return '<{}({})>'.format(self.__class__.__name__, body) + + cdef _eq_to_mapping(self, other): + cdef _Pair item + if len(self._impl._items) != len(other): + return False + for i in self._impl._items: + item = <_Pair>i + for k, v in other.items(): + if self._title(k) != item._identity: + continue + if v == item._value: + break + else: + return False + return True + + def __richcmp__(self, other, op): + if op == 2: # == + return _eq(self, other) + elif op == 3: # != + ret = _eq(self, other) + if ret is NotImplemented: + return ret + else: + return not ret + else: + return NotImplemented + + +cdef class MultiDictProxy(_Base): + _proxy_classes = (MultiDict, MultiDictProxy) + _base_class = MultiDict + + def __init__(self, arg): + cdef _Base base + if not isinstance(arg, self._proxy_classes): + raise TypeError( + 'ctor requires {} instance' + ', not {}'.format( + ' or '.join(self._proxy_classes), + type(arg))) + + base = arg + self._impl = base._impl + + def __reduce__(self): + raise TypeError("can't pickle {} objects".format(self.__class__.__name__)) + + def copy(self): + """Return a copy of itself.""" + return self._base_class(self) + +abc.Mapping.register(MultiDictProxy) + + +cdef class CIMultiDictProxy(MultiDictProxy): + _proxy_classes = (CIMultiDict, CIMultiDictProxy) + _base_class = CIMultiDict + + cdef str _title(self, s): + typ = type(s) + if typ is str: + return (s.title()) + elif type(s) is _istr: + return PyObject_Str(s) + return s.title() + + +abc.Mapping.register(CIMultiDictProxy) + + +cdef str _str(key): + typ = type(key) + if typ is str: + return key + if typ is _istr: + return PyObject_Str(key) + elif issubclass(typ, str): + return str(key) + else: + raise TypeError("MultiDict keys should be either str " + "or subclasses of str") + + +cdef class MultiDict(_Base): + """An ordered dictionary that can have multiple values for each key.""" + + def __init__(self, *args, **kwargs): + self._impl = _Impl() + self._extend(args, kwargs, 'MultiDict', True) + + def __reduce__(self): + return ( + self.__class__, + tuple(self.items()), + ) + + cdef _extend(self, tuple args, dict kwargs, name, bint do_add): + cdef _Pair item + cdef object key + + if len(args) > 1: + raise TypeError("{} takes at most 1 positional argument" + " ({} given)".format(name, len(args))) + + if args: + arg = args[0] + if isinstance(arg, CIMultiDict): + self._impl._items.extend((<_Base>arg)._impl._items) + elif isinstance(arg, _Base): + for i in (<_Base>arg)._impl._items: + 
item = <_Pair>i + key = item._key + value = item._value + if do_add: + self._add(key, value) + else: + self._replace(key, value) + elif hasattr(arg, 'items'): + for i in arg.items(): + if isinstance(i, _Pair): + item = <_Pair>i + key = item._key + value = item._value + else: + key = i[0] + value = i[1] + if do_add: + self._add(key, value) + else: + self._replace(key, value) + else: + for i in arg: + if isinstance(i, _Pair): + item = <_Pair>i + key = item._key + value = item._value + else: + if not len(i) == 2: + raise TypeError( + "{} takes either dict or list of (key, value) " + "tuples".format(name)) + key = i[0] + value = i[1] + if do_add: + self._add(key, value) + else: + self._replace(key, value) + + + for key, value in kwargs.items(): + if do_add: + self._add(key, value) + else: + self._replace(key, value) + + cdef _add(self, key, value): + self._impl._items.append(_Pair.__new__( + _Pair, self._title(key), _str(key), value)) + self._impl.incr_version() + + cdef _replace(self, key, value): + cdef str identity = self._title(key) + cdef str k = _str(key) + cdef Py_hash_t h = hash(identity) + cdef Py_ssize_t i, rgt + cdef _Pair item + cdef list items = self._impl._items + + for i in range(len(items)-1, -1, -1): + item = <_Pair>items[i] + if h != item._hash: + continue + if item._identity == identity: + item._key = k + item._value = value + # i points to last found item + rgt = i + self._impl.incr_version() + break + else: + self._impl._items.append(_Pair.__new__(_Pair, identity, k, value)) + self._impl.incr_version() + return + + # remove all precending items + i = 0 + while i < rgt: + item = <_Pair>items[i] + if h == item._hash and item._identity == identity: + del items[i] + rgt -= 1 + else: + i += 1 + + def add(self, key, value): + """Add the key and value, not overwriting any previous value.""" + self._add(key, value) + + def copy(self): + """Return a copy of itself.""" + cls = self.__class__ + return cls(self) + + def extend(self, *args, **kwargs): + """Extend current MultiDict with more values. + + This method must be used instead of update. + """ + self._extend(args, kwargs, "extend", True) + + def clear(self): + """Remove all items from MultiDict""" + self._impl._items.clear() + self._impl.incr_version() + + # MutableMapping interface # + + def __setitem__(self, key, value): + self._replace(key, value) + + def __delitem__(self, key): + self._remove(key) + + cdef _remove(self, key): + cdef _Pair item + cdef bint found = False + cdef str identity = self._title(key) + cdef Py_hash_t h = hash(identity) + cdef list items = self._impl._items + for i in range(len(items) - 1, -1, -1): + item = <_Pair>items[i] + if item._hash != h: + continue + if item._identity == identity: + del items[i] + found = True + if not found: + raise KeyError(key) + else: + self._impl.incr_version() + + def setdefault(self, key, default=None): + """Return value for key, set value to default if key is not present.""" + cdef _Pair item + cdef str identity = self._title(key) + cdef Py_hash_t h = hash(identity) + cdef list items = self._impl._items + for i in items: + item = <_Pair>i + if item._hash != h: + continue + if item._identity == identity: + return item._value + self._add(key, default) + return default + + def popone(self, key, default=_marker): + """Remove the last occurrence of key and return the corresponding + value. + + If key is not found, default is returned if given, otherwise + KeyError is raised. 
+ + """ + cdef object value = None + cdef str identity = self._title(key) + cdef Py_hash_t h = hash(identity) + cdef _Pair item + cdef list items = self._impl._items + for i in range(len(items)): + item = <_Pair>items[i] + if item._hash != h: + continue + if item._identity == identity: + value = item._value + del items[i] + self._impl.incr_version() + return value + if default is _marker: + raise KeyError(key) + else: + return default + + pop = popone + + def popall(self, key, default=_marker): + """Remove all occurrences of key and return the list of corresponding + values. + + If key is not found, default is returned if given, otherwise + KeyError is raised. + + """ + cdef bint found = False + cdef str identity = self._title(key) + cdef Py_hash_t h = hash(identity) + cdef _Pair item + cdef list items = self._impl._items + cdef list ret = [] + for i in range(len(items)-1, -1, -1): + item = <_Pair>items[i] + if item._hash != h: + continue + if item._identity == identity: + ret.append(item._value) + del items[i] + self._impl.incr_version() + found = True + if not found: + if default is _marker: + raise KeyError(key) + else: + return default + else: + ret.reverse() + return ret + + def popitem(self): + """Remove and return an arbitrary (key, value) pair.""" + cdef _Pair item + cdef list items = self._impl._items + if items: + item = <_Pair>items.pop(0) + self._impl.incr_version() + return (item._key, item._value) + else: + raise KeyError("empty multidict") + + def update(self, *args, **kwargs): + """Update the dictionary from *other*, overwriting existing keys.""" + self._extend(args, kwargs, "update", False) + + +abc.MutableMapping.register(MultiDict) + + +cdef class CIMultiDict(MultiDict): + """An ordered dictionary that can have multiple values for each key.""" + + def __init__(self, *args, **kwargs): + self._impl = _Impl() + + self._extend(args, kwargs, 'CIMultiDict', True) + + cdef str _title(self, s): + typ = type(s) + if typ is str: + return (s.title()) + elif type(s) is _istr: + return PyObject_Str(s) + return s.title() + + +abc.MutableMapping.register(CIMultiDict) + + +cdef class _ViewBase: + + cdef _Impl _impl + + def __cinit__(self, _Impl impl): + self._impl = impl + + def __len__(self): + return len(self._impl._items) + + +cdef class _ViewBaseSet(_ViewBase): + + def __richcmp__(self, other, op): + if op == 0: # < + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self <= other + elif op == 1: # <= + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + elif op == 2: # == + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self <= other + elif op == 3: # != + return not self == other + elif op == 4: # > + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self >= other + elif op == 5: # >= + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + if isinstance(self, _ViewBaseSet): + self = set(iter(self)) + if isinstance(other, _ViewBaseSet): + other = set(iter(other)) + if not isinstance(other, Set): + other = set(iter(other)) + return self & other + + def __or__(self, other): + if not isinstance(other, Iterable): + return 
NotImplemented + if isinstance(self, _ViewBaseSet): + self = set(iter(self)) + if isinstance(other, _ViewBaseSet): + other = set(iter(other)) + if not isinstance(other, Set): + other = set(iter(other)) + return self | other + + def __sub__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + if isinstance(self, _ViewBaseSet): + self = set(iter(self)) + if isinstance(other, _ViewBaseSet): + other = set(iter(other)) + if not isinstance(other, Set): + other = set(iter(other)) + return self - other + + def __xor__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + if isinstance(self, _ViewBaseSet): + self = set(iter(self)) + if isinstance(other, _ViewBaseSet): + other = set(iter(other)) + if not isinstance(other, Set): + other = set(iter(other)) + return self ^ other + + +cdef class _ItemsIter: + cdef _Impl _impl + cdef int _current + cdef int _len + cdef unsigned long long _version + + def __cinit__(self, _Impl impl): + self._impl = impl + self._current = 0 + self._version = impl._version + self._len = len(impl._items) + + def __iter__(self): + return self + + def __next__(self): + if self._version != self._impl._version: + raise RuntimeError("Dictionary changed during iteration") + if self._current == self._len: + raise StopIteration + item = <_Pair>self._impl._items[self._current] + self._current += 1 + return (item._key, item._value) + + +cdef class _ItemsView(_ViewBaseSet): + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' + cdef _Pair item + for i in self._impl._items: + item = <_Pair>i + t = (item._key, item._value) + if t in other: + return False + return True + + def __contains__(self, i): + cdef _Pair item + cdef str key + cdef object value + assert isinstance(i, tuple) or isinstance(i, list) + assert len(i) == 2 + key = i[0] + value = i[1] + for item in self._impl._items: + if key == item._key and value == item._value: + return True + return False + + def __iter__(self): + return _ItemsIter.__new__(_ItemsIter, self._impl) + + def __repr__(self): + cdef _Pair item + lst = [] + for i in self._impl._items: + item = <_Pair>i + lst.append("{!r}: {!r}".format(item._key, item._value)) + body = ', '.join(lst) + return '{}({})'.format(self.__class__.__name__, body) + + +abc.ItemsView.register(_ItemsView) + + +cdef class _ValuesIter: + cdef _Impl _impl + cdef int _current + cdef int _len + cdef unsigned long long _version + + def __cinit__(self, _Impl impl): + self._impl = impl + self._current = 0 + self._len = len(impl._items) + self._version = impl._version + + def __iter__(self): + return self + + def __next__(self): + if self._version != self._impl._version: + raise RuntimeError("Dictionary changed during iteration") + if self._current == self._len: + raise StopIteration + item = <_Pair>self._impl._items[self._current] + self._current += 1 + return item._value + + +cdef class _ValuesView(_ViewBase): + + def __contains__(self, value): + cdef _Pair item + for i in self._impl._items: + item = <_Pair>i + if item._value == value: + return True + return False + + def __iter__(self): + return _ValuesIter.__new__(_ValuesIter, self._impl) + + def __repr__(self): + cdef _Pair item + lst = [] + for i in self._impl._items: + item = <_Pair>i + lst.append("{!r}".format(item._value)) + body = ', '.join(lst) + return '{}({})'.format(self.__class__.__name__, body) + + +abc.ValuesView.register(_ValuesView) + + +cdef class _KeysIter: + cdef _Impl _impl + cdef int _current + cdef int _len + cdef unsigned long long 
_version + + def __cinit__(self, _Impl impl): + self._impl = impl + self._current = 0 + self._len = len(self._impl._items) + self._version = impl._version + + def __iter__(self): + return self + + def __next__(self): + if self._version != self._impl._version: + raise RuntimeError("Dictionary changed during iteration") + if self._current == self._len: + raise StopIteration + item = <_Pair>self._impl._items[self._current] + self._current += 1 + return item._key + + +cdef class _KeysView(_ViewBaseSet): + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' + cdef _Pair item + for i in self._impl._items: + item = <_Pair>i + if item._key in other: + return False + return True + + def __contains__(self, value): + cdef _Pair item + for i in self._impl._items: + item = <_Pair>i + if item._key == value: + return True + return False + + def __iter__(self): + return _KeysIter.__new__(_KeysIter, self._impl) + + def __repr__(self): + cdef _Pair item + lst = [] + for i in self._impl._items: + item = <_Pair>i + lst.append("{!r}".format(item._key)) + body = ', '.join(lst) + return '{}({})'.format(self.__class__.__name__, body) + + +abc.KeysView.register(_KeysView) diff --git a/RBXLegacyDiscordBot/lib/multidict/_multidict_py.py b/RBXLegacyDiscordBot/lib/multidict/_multidict_py.py new file mode 100644 index 0000000..89382cf --- /dev/null +++ b/RBXLegacyDiscordBot/lib/multidict/_multidict_py.py @@ -0,0 +1,449 @@ +from array import array +from collections import abc +import sys + +_marker = object() + + +class istr(str): + + """Case insensitive str.""" + + __is_istr__ = True + + def __new__(cls, val='', + encoding=sys.getdefaultencoding(), errors='strict'): + if getattr(val, '__is_istr__', False): + # Faster than instance check + return val + if type(val) is str: + pass + else: + val = str(val) + val = val.title() + return str.__new__(cls, val) + + def title(self): + return self + + +upstr = istr # for relaxing backward compatibility problems + + +def getversion(md): + if not isinstance(md, _Base): + raise TypeError("Parameter should be multidict or proxy") + return md._impl._version + + +_version = array('Q', [0]) + + +class _Impl: + __slots__ = ('_items', '_version') + + def __init__(self): + self._items = [] + self.incr_version() + + def incr_version(self): + global _version + v = _version + v[0] += 1 + self._version = v[0] + + +class _Base: + + def _title(self, key): + return key + + def getall(self, key, default=_marker): + """Return a list of all values matching the key.""" + identity = self._title(key) + res = [v for i, k, v in self._impl._items if i == identity] + if res: + return res + if not res and default is not _marker: + return default + raise KeyError('Key not found: %r' % key) + + def getone(self, key, default=_marker): + """Get first value matching the key.""" + identity = self._title(key) + for i, k, v in self._impl._items: + if i == identity: + return v + if default is not _marker: + return default + raise KeyError('Key not found: %r' % key) + + # Mapping interface # + + def __getitem__(self, key): + return self.getone(key) + + def get(self, key, default=None): + """Get first value matching the key. + + The method is alias for .getone(). 
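# --- Editorial usage sketch (not part of the patch) -----------------------
# The accessors defined above (getall, getone, get) are what distinguish a
# MultiDict from a plain dict: several values can live under one key.
# A minimal, hedged example, assuming the bundled package under
# RBXLegacyDiscordBot/lib is importable as `multidict`:
from multidict import MultiDict

md = MultiDict()
md.add('key', 'v1')
md.add('key', 'v2')

assert md.getall('key') == ['v1', 'v2']    # every value stored for the key
assert md.getone('key') == 'v1'            # first match only
assert md['key'] == 'v1'                   # __getitem__ delegates to getone()
assert md.get('missing') is None           # get() defaults to None
assert md.getall('missing', []) == []      # explicit default avoids KeyError
# ---------------------------------------------------------------------------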
+ """ + return self.getone(key, default) + + def __iter__(self): + return iter(self.keys()) + + def __len__(self): + return len(self._impl._items) + + def keys(self): + """Return a new view of the dictionary's keys.""" + return _KeysView(self._impl) + + def items(self): + """Return a new view of the dictionary's items *(key, value) pairs).""" + return _ItemsView(self._impl) + + def values(self): + """Return a new view of the dictionary's values.""" + return _ValuesView(self._impl) + + def __eq__(self, other): + if not isinstance(other, abc.Mapping): + return NotImplemented + if isinstance(other, _Base): + lft = self._impl._items + rht = other._impl._items + if len(lft) != len(rht): + return False + for (i1, k2, v1), (i2, k2, v2) in zip(lft, rht): + if i1 != i2 or v1 != v2: + return False + return True + for k, v in self.items(): + nv = other.get(k, _marker) + if v != nv: + return False + return True + + def __contains__(self, key): + identity = self._title(key) + for i, k, v in self._impl._items: + if i == identity: + return True + return False + + def __repr__(self): + body = ', '.join("'{}': {!r}".format(k, v) for k, v in self.items()) + return '<{}({})>'.format(self.__class__.__name__, body) + + +class _CIBase(_Base): + + def _title(self, key): + return key.title() + + +class MultiDictProxy(_Base, abc.Mapping): + + def __init__(self, arg): + if not isinstance(arg, (MultiDict, MultiDictProxy)): + raise TypeError( + 'ctor requires MultiDict or MultiDictProxy instance' + ', not {}'.format( + type(arg))) + + self._impl = arg._impl + + def __reduce__(self): + raise TypeError("can't pickle {} objects".format( + self.__class__.__name__)) + + def copy(self): + """Return a copy of itself.""" + return MultiDict(self.items()) + + +class CIMultiDictProxy(_CIBase, MultiDictProxy): + + def __init__(self, arg): + if not isinstance(arg, (CIMultiDict, CIMultiDictProxy)): + raise TypeError( + 'ctor requires CIMultiDict or CIMultiDictProxy instance' + ', not {}'.format( + type(arg))) + + self._impl = arg._impl + + def _title(self, key): + return key.title() + + def copy(self): + """Return a copy of itself.""" + return CIMultiDict(self.items()) + + +class MultiDict(_Base, abc.MutableMapping): + + def __init__(self, *args, **kwargs): + self._impl = _Impl() + + self._extend(args, kwargs, self.__class__.__name__, self.add) + + def _title(self, key): + return key + + def _key(self, key): + if isinstance(key, str): + return str(key) + else: + raise TypeError("MultiDict keys should be either str " + "or subclasses of str") + + def add(self, key, value): + identity = self._title(key) + self._impl._items.append((identity, self._key(key), value)) + self._impl.incr_version() + + def copy(self): + """Return a copy of itself.""" + cls = self.__class__ + return cls(self.items()) + + def extend(self, *args, **kwargs): + """Extend current MultiDict with more values. + + This method must be used instead of update. 
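# --- Editorial usage sketch (not part of the patch) -----------------------
# MultiDictProxy / CIMultiDictProxy above are read-only views that share the
# underlying _Impl with the dict they wrap: changes made through the original
# object are visible through the proxy, but the proxy itself only exposes the
# Mapping interface.  Assuming the bundled package imports as `multidict`:
from multidict import CIMultiDict, CIMultiDictProxy

headers = CIMultiDict()
headers['Content-Type'] = 'text/html'

proxy = CIMultiDictProxy(headers)
assert proxy['content-type'] == 'text/html'    # lookups are case-insensitive
assert 'CONTENT-TYPE' in proxy

headers.add('Set-Cookie', 'a=1')               # mutate the original dict...
assert proxy.getall('set-cookie') == ['a=1']   # ...and the proxy sees it

# The proxy has no __setitem__/add; copy() returns a mutable CIMultiDict.
mutable = proxy.copy()
mutable['X-Debug'] = 'yes'
# ---------------------------------------------------------------------------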
+ """ + self._extend(args, kwargs, 'extend', self.add) + + def _extend(self, args, kwargs, name, method): + if len(args) > 1: + raise TypeError("{} takes at most 1 positional argument" + " ({} given)".format(name, len(args))) + if args: + arg = args[0] + if isinstance(args[0], MultiDictProxy): + items = arg._impl._items + elif isinstance(args[0], MultiDict): + items = arg._impl._items + elif hasattr(arg, 'items'): + items = [(k, k, v) for k, v in arg.items()] + else: + items = [] + for item in arg: + if not len(item) == 2: + raise TypeError( + "{} takes either dict or list of (key, value) " + "tuples".format(name)) + items.append((item[0], item[0], item[1])) + + for identity, key, value in items: + method(key, value) + + for key, value in kwargs.items(): + method(key, value) + + def clear(self): + """Remove all items from MultiDict.""" + self._impl._items.clear() + self._impl.incr_version() + + # Mapping interface # + + def __setitem__(self, key, value): + key = self._title(key) + self._replace(key, value) + + def __delitem__(self, key): + key = self._title(key) + items = self._impl._items + found = False + for i in range(len(items) - 1, -1, -1): + if items[i][0] == key: + del items[i] + found = True + if not found: + raise KeyError(key) + else: + self._impl.incr_version() + + def setdefault(self, key, default=None): + """Return value for key, set value to default if key is not present.""" + key = self._title(key) + for i, k, v in self._impl._items: + if i == key: + return v + self.add(key, default) + return default + + def popone(self, key, default=_marker): + """Remove specified key and return the corresponding value. + + If key is not found, d is returned if given, otherwise + KeyError is raised. + + """ + key = self._title(key) + for i in range(len(self._impl._items)): + if self._impl._items[i][0] == key: + value = self._impl._items[i][2] + del self._impl._items[i] + self._impl.incr_version() + return value + if default is _marker: + raise KeyError(key) + else: + return default + + pop = popone + + def popall(self, key, default=_marker): + """Remove all occurrences of key and return the list of corresponding + values. + + If key is not found, default is returned if given, otherwise + KeyError is raised. 
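# --- Editorial usage sketch (not part of the patch) -----------------------
# popone()/pop() above removes only the *first* occurrence of a key, and
# setdefault() only inserts when the key is absent.  A short illustration,
# again assuming the bundled package imports as `multidict`:
from multidict import MultiDict

md = MultiDict([('a', '1'), ('a', '2')])

assert md.pop('a') == '1'               # first occurrence removed...
assert md.getall('a') == ['2']          # ...the second one is still there

assert md.setdefault('b', '0') == '0'   # 'b' was missing, so it is added
assert md.setdefault('a', 'x') == '2'   # 'a' exists, nothing changes
assert md.pop('zzz', None) is None      # a default suppresses the KeyError
# ---------------------------------------------------------------------------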
+ + """ + found = False + identity = self._title(key) + ret = [] + for i in range(len(self._impl._items)-1, -1, -1): + item = self._impl._items[i] + if item[0] == identity: + ret.append(item[2]) + del self._impl._items[i] + self._impl.incr_version() + found = True + if not found: + if default is _marker: + raise KeyError(key) + else: + return default + else: + ret.reverse() + return ret + + def popitem(self): + """Remove and return an arbitrary (key, value) pair.""" + if self._impl._items: + i = self._impl._items.pop(0) + self._impl.incr_version() + return i[1], i[2] + else: + raise KeyError("empty multidict") + + def update(self, *args, **kwargs): + """Update the dictionary from *other*, overwriting existing keys.""" + self._extend(args, kwargs, 'update', self._replace) + + def _replace(self, key, value): + key = self._key(key) + identity = self._title(key) + items = self._impl._items + + for i in range(len(items)-1, -1, -1): + item = items[i] + if item[0] == identity: + items[i] = (identity, key, value) + # i points to last found item + rgt = i + self._impl.incr_version() + break + else: + self._impl._items.append((identity, key, value)) + self._impl.incr_version() + return + + # remove all precending items + i = 0 + while i < rgt: + item = items[i] + if item[0] == identity: + del items[i] + rgt -= 1 + else: + i += 1 + + +class CIMultiDict(_CIBase, MultiDict): + pass + + +class _ViewBase: + + def __init__(self, impl): + self._impl = impl + self._version = impl._version + + def __len__(self): + return len(self._impl._items) + + +class _ItemsView(_ViewBase, abc.ItemsView): + + def __contains__(self, item): + assert isinstance(item, tuple) or isinstance(item, list) + assert len(item) == 2 + for i, k, v in self._impl._items: + if item[0] == k and item[1] == v: + return True + return False + + def __iter__(self): + for i, k, v in self._impl._items: + if self._version != self._impl._version: + raise RuntimeError("Dictionary changed during iteration") + yield k, v + + def __repr__(self): + lst = [] + for item in self._impl._items: + lst.append("{!r}: {!r}".format(item[1], item[2])) + body = ', '.join(lst) + return '{}({})'.format(self.__class__.__name__, body) + + +class _ValuesView(_ViewBase, abc.ValuesView): + + def __contains__(self, value): + for item in self._impl._items: + if item[2] == value: + return True + return False + + def __iter__(self): + for item in self._impl._items: + if self._version != self._impl._version: + raise RuntimeError("Dictionary changed during iteration") + yield item[2] + + def __repr__(self): + lst = [] + for item in self._impl._items: + lst.append("{!r}".format(item[2])) + body = ', '.join(lst) + return '{}({})'.format(self.__class__.__name__, body) + + +class _KeysView(_ViewBase, abc.KeysView): + + def __contains__(self, key): + for item in self._impl._items: + if item[1] == key: + return True + return False + + def __iter__(self): + for item in self._impl._items: + if self._version != self._impl._version: + raise RuntimeError("Dictionary changed during iteration") + yield item[1] + + def __repr__(self): + lst = [] + for item in self._impl._items: + lst.append("{!r}".format(item[1])) + body = ', '.join(lst) + return '{}({})'.format(self.__class__.__name__, body) diff --git a/RBXLegacyDiscordBot/lib/nacl/__init__.py b/RBXLegacyDiscordBot/lib/nacl/__init__.py new file mode 100644 index 0000000..3b39712 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/nacl/__init__.py @@ -0,0 +1,33 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the 
Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import, division, print_function
+
+__all__ = [
+    "__title__", "__summary__", "__uri__", "__version__", "__author__",
+    "__email__", "__license__", "__copyright__",
+]
+
+__title__ = "PyNaCl"
+__summary__ = ("Python binding to the Networking and Cryptography (NaCl) "
+               "library")
+__uri__ = "https://github.com/pyca/pynacl/"
+
+__version__ = "1.0.1"
+
+__author__ = "The PyNaCl developers"
+__email__ = "cryptography-dev@python.org"
+
+__license__ = "Apache License 2.0"
+__copyright__ = "Copyright 2013-2016 {0}".format(__author__)
diff --git a/RBXLegacyDiscordBot/lib/nacl/_sodium.cp36-win32.pyd b/RBXLegacyDiscordBot/lib/nacl/_sodium.cp36-win32.pyd
new file mode 100644
index 0000000000000000000000000000000000000000..77081182020ee972b063fa5ed5fe4b0867929abe
GIT binary patch
literal 183296
[base85-encoded binary patch data for _sodium.cp36-win32.pyd omitted]
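Stepping back to the multidict module added above: the pure-Python _multidict_py.py distinguishes extend() (append, keeping duplicates) from update()/_replace() (overwrite, collapsing duplicates), and popall() removes every occurrence of a key at once. A minimal sketch of those behaviours follows; it is illustrative only, not part of this patch, and assumes the bundled copy under RBXLegacyDiscordBot/lib is importable as `multidict`.

from multidict import MultiDict

md = MultiDict()
md.add('tag', 'red')
md.extend([('tag', 'blue')])            # extend keeps both entries
assert md.getall('tag') == ['red', 'blue']

md.update(tag='green')                  # update overwrites; one entry remains
assert md.getall('tag') == ['green']

md.add('tag', 'black')
assert md.popall('tag') == ['green', 'black']   # removes every occurrence
assert 'tag' not in md
assert md.popall('tag', []) == []               # default instead of KeyError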
z$^{f&k^#H;he7F_MDkMNJQ`l!1D!LjqMvI8u|50}=prQ?dng?u^-Dq9U<+Pz{RFF- z4tsCd2ys&uqTAWQgmdjQaCznaxv&r{DdrSc?=n6rqeN_ zB;Yw;EvYIv4R@E95Vh($kQ;T7$eeeAs1-@XdVM$)^?OT<>&2nqy$n&B5(oFMFQ96V z!jWaNFv#;LMDGYfm+K0c@quAw`)UY!9Yeaj3&zypsdUN3nONC{t@E&$czf_!QnNx1 z4D<`=5F2k;W37zmZ>Hh!4a(p#TM(<&3hCRm%YlE2C8>Sn4GrB?>FS-Lu(FRiPJbhV z+p5H=zr7X)?e?N^2h>5z&=D)tL;<~fVdS+)JZW;31YMSdA@&Yrue>-I?OQ?2AJkw< zbSyo}@-F0YES=ar0Z#~P9bnGYk+0q5oyIN2UmB(QBK7%wyy1^;7 z|0%uS9jr1{B15z#;fv!-*t0+sKfd2i3s2Ou|Dm~zMjx+*eQdtkU40K_d$f>FQvxwT zWIWk+B?BD2R^f)vP0&66IO=+D#3uV>hcAkq1r{s|1%nwT1 z>zMKN9jVZ9b*GQ<)A4y+cufjo>Z^O>d8&EG` z8TBL4a6;`ky+e26XZ`?4r2aT(mM)x^8jExzAH)_6f}qyzbfA_gMg`4+`^Vp+^{|V$ zaqwZFD_qIQ>BVs2X%MrDYNYpD&z>M>I{n`6Y$pC7f4(3A@R)`nE2U-q{|7z zm4(@GpY5-n@MIvtr*e2+^%1_yw8G=U20r{?Anc_?Yb7kRDu^7JE#ewkJ z^{`dw4vNgYM0Z@ihdq{GMz#D(m===-3DrBHx2p!ErV620@F&>)d;?@dDUp7EAKvDA zV6O&O=v_4c2AU+H(y#}#-l`J&b{>Js6-IFV!*S@pQ4IMs)@q5H>j3-6}SW0vsmQU5*m<;n> zS^g=v090pRz{EALsO;nAc;tyQom!WME&ayeu3$^NCbf!mjyZr5t}P%lCJ_(Y#KH*i z49bcbBXth(Srdwp>MOB~#UD9!d<2x7anxIDI(d&Z z6qFegukHpoC-w$r)tG{k+Cb)K_(13kH5%8X3JYUfVd;rr`0^qMCLEg%rc)&`=V4c< zaoUa61FVo98biwh7eUyLi&R?b8VngDhsm}^pqKa*E8+*@0NsV;)z*42c3Mv-?3o9B z?V{k>XDzIG#D`n6$}#rr6Uz5W8ZNB8&*Io1TsJwA+P-LId!?kPs#PBDu9!t;2g$*^ z+rwdrTMZ6(SVIThR`v?$+Zv&wZ&(MqcThY>P z9~oc~2FH)M5)$U@$qXSSLz6U(dXnl zn|tBv^Nldq|1P|b$b)+;=fKWhhha&YD#)mv!c7x5z~!lH=}zel@Th7Z^p83SvzLy8 zbGL->n58)xX_g3|27BQ0&PAxTeJB|};sV@y+?k5b4uOz!-O%vEb@b@I4j+wugbQXq z1uKh249i=Algmdy*N-zXqxd{LuM{RT)?YwfmG!V>rvms39;J=-J7Jv0W-!eS0bl#Ivj z?uHz%-elxewqNaiHVxE&2?Lb+k*lsI&|l*yz4>7jjGeljMjgM8_Ktfno#x=_9!>DJ z>piH8d=J*zgRs8uV(dD9A6CsS#l!w4kZYAq>Pyey!NuE%)_Eh;Toy;n%AMhn;&SjD zI~oN0_JTLjci?2+0SKKk0=Agb)jf4l+hRQPG! zZ8i8JF^_DWx(3E=A4JAGO5@d|u5|rtJ1{A*LvNkAI4iu7JoP$_Vi`f?fleK~s+o`S zZ+qZTx6S12Y9)wPIY#T|Ed}Rsw}?!aNo-%12E=Z3!U68XY1~5Q7w1cX;r_7@>S{oD zFWrZ=cMpJ6{yq$u^_)J_eu4{krGTlYCd#W65z!sf(Q^M9Dk?D(cI1YU$P!)LZy`t? zUbREX1aDkyRt05x?qqjM7o5Mj3asBY zc?x(h8cWnGqOg1AQ(9JGjG{xmNrYblG_yR?ZtW|$!?rK%&b|a+22O!(b5_H?%ZIFWu_h;a@VlRe|yi9b~iec2)F=V8A z2Aq1-7o9g9$J5StaQWd>urz#+4^~-X3*Ai5-LpnhOLY)fs*f@hY1Ra(|3}ez#^w07 zar`)CRYr+QwuX_U6s2KR2$e)=7Zp-TLy}~a5Gf5wv?L`XA|>=EA*obkWVD4eQJlZ$ zdfxY@`*YvdbsWF(Jz!erhG~CRQpa9(YL7ciA3C;DRIDRaC;9VEwJ>T`{KIG79!Gcl znov7A1?Bw>EKL6(Oc#lA+c`T)@~Aq02xM>h7-<2d@*kO*MFfS!6WQ)v=QKGqw zw=WGOxleWUG0X-NKRtu!`$JT$)W)2{*ORXD8X+$#N39A&`OD7Jl$yMd=Fa{EuQ{d= zz50dfL>@x*LJMHCm5-R&L~=*XS-EmIjhm(f$MPU7iq6HLz6#3M9naOqzD0)j3;2!N zO$pvnIHb7)!-e01``UCm_bv>BMGinceIj(vt0H*+Ic_U*k3=pS(Atl$DP~g<8+mCl z)vuk47nw^iFJG6%Ijg`g{xkjjCHQBLZ{XOnk0>>lU^B{QBH_UYyb0*W+qZehzuS(# zSr_o1hBQTF-{SegKL5((8paKINKw_U*d*xoW&ul>^pv~AhB)zwWn$F$br5MztfZqu z-}7bj(`otV^~|@hfcV2LEb~tmY!h#Dn*naPX0r+_oJ?W7D46%oUQA9MLA*EC8M8FY zxsLN!iocqQ4V{z8Lx~Y`3$g3kNS>h^Onbg}(56)jN#A%k_Wb8gegkh%U1ud7m@=Cu zOrJsNk)BK~#RLA=+Gy3w5fB+$%x1Jz(>jG3re?hZC9)IwrEmYyscC!JxBsGPlfoS= zU3XcS$>sRLy#{DME01|kQ|Xbk57b7!AlXk7*uI?+2|d?AG|te4mr4iY=3jkY$*d6R&hajJJbnCI&2)xN z5%N>r6#QxqZNE@Xo;nq@WAz(8bxa<5owSg1ObU4u)M2qvij4al@p)zm9@M@-r9?V8 z9kOG}P1?l8pTntOA^aZpbF;1UsO8o@cH7Vs<(X+v-Ek8EYttw)Sp)In0})%l36WtA zT(5NoY-i4gTk1E8GvFk4T$o?Bnewj0Fj{(J3BMjtjFI|N>G6h{h_<_qI4VXo--*wg zQV>}Ci$+O)qWw#D^9N%^q5k0osZ|+KD^i8Ni70&#dPG^Dx?!tT#yWEbV3JiTYnpu& z_nO|5wyGXH#Lv-nt2i3p1&1DWscjr$4JNunDbAh#9k<>xw_5SVaY%7t@7? 
zsVsX}4S~D>LX3MAWQO!GWV*3~#D0c|g zBy=B9oj(`x15H_n!^aLd(8?mf(0Mhfz&*8QQO#0 zer`u;np6NksMbb@EKbpZrqdW}oKJ&#<{|H02OCw?fR1O*v~}k*I@@MSV?=Ktfwfb~ zX+7HYs}gD-LWO-~D3hNpN^2Gi9-ok-4)mT4DpvqXizRm-W|dJh4l*l#ld1`S@VPr)Y#F7=sJ>7%H)sTt4Vp+asK+cFWGA>VLpl9X~d#j%w8zW z0d|G>{Ng&42c1BcQx&@NqiDk1GidOZqI&^mSP@>vgZt!Zx#>6dzHu~WJi7v?RX)F% zyZy*dJc`{^{S4QVJf1edi&mRdkxKk^$UgGt=I3&0lSCmK7WJ6y+T$SJwv5(C`XMW% zhI9`;qNLBkH0i)@ZdWpwwl-H{f#_){U;2Q(-%n8SD|J4*$AwaVTTq-)Id1w~CnM85 z>N1{8(Frf<@uMWxquEX)ZF>2!pK5rtK>^810kps59Lrwif-4zQ`5TrAuep+Rq@jr1 zW3>3T-t#1N#}qNEf8wlJGLdEmCd95M16v8Q9^%d&eWLMabrb%H9jBVT)?D7VjW*8F zX40A$=}v^6kdZ2)n%{a{$Gw;Q$E@KOMH*<^A~_hggrM`iCZGL&9qo+%1&16L6b1|YSIBYuZRyl> zP5R`~%;m>-O&)Ta87h8&Y%cBzXX@!=x9dn`eRCd|TSLm%k2ZQ#?_T%l!!ddS~! zPS8MP=(|rUeZKS&@m}6YiR)ytlP^=C<4%4_Y86SId(Y2m3?eCyBrMzX7gyYWl31}n z{^6&vQ&rN3qSxF%MG~KfI8mWmF6|OM%v$7*qT1jP&GOrW9LKp7y=faQQ*VOU8DCsC z8cS)GFKE8d7x*#Jfp(sKNsecNXzKQN+{Y^mt8RjKYUkj?)GV%*tcJBBz|If%P%yTE zOKjMPW#fyvv}828{5iHibAo8u2=-p}3~dja#vIg&5Iy%Xzq>#j+m%~b$>0&Vb$BuV z(2$7${bpAGH3w7gRq^ipKU6r}j2+m>=;PrlBwAODB|*lN{>BDpFBP*Tr-%%kXS1wL z@$_c@N?e#eml7q%vAsXl=-abvh~4`DN)HRkE?19IqV@u5b1{CpB!6vhLhDbx<&zYi zVZG=e@|aPA?`{sHtv`!=?u=wA9o`hj-s4{4nw$2c*{ldh zf`izYQTb3=Abj3YQz`q8IF}EfK0czhZv;O-}7l` zH<<_7h4Z0QXG`C24?|gLB}q-|0SI-qt)5kBo^PM5x8R5swleNezU^FGW ztb3e;bS0mYfb6^8RucgyHBx*<;&X=2PfX5COl*2TJ9|8NPO z&d{O2A3Ja}yNASm_H{kI(p^I#*Ep*v)$s%bT-H6;v7cUOMnDUR5klRUT{qtyT zLN<>V`UQ8_wy=*2{4qgh1IY&l;qstpo;vIR&OVwzN%lu*i2MYWtk_Q?vpYex1*Dz* zg_X4qAnp7kJm8)vp85pgu0km_k9|XCCR<4=Bp9ck8&LR~K>j<58TldI_JU8(3srCqdt}hb8ik0jd+T1MJ*PqTaQgE64>*s zd@_qwg>6_f6z=QbbHyzfzpmzkeRFU|$i)guGP#~x&W_i|Bln90xBqO7U867a!F?w5 zd*u~qZX67{=}XInUi_k=Wi0#fN6OPbj2dxAdOWF|GSj;u-F%D1yv?Iu84fJ%*e|H= zj%F^u=aR1Bcs@4g5$zrFmM_@8hVHZ`;IFX)`4%jta63)Z*{1Q?dYV+GvkR)<`d~b0 zG7FCXfo+q&kbk}&L@svlrI)iX;mr-mJpD`m+`FhGP@8rT2@6_hh2bq2NTrSSnGoh1GII=@~jEp(u(`6Tffp!5Fx!dNdPo0ZVHSaFIEF=21! z{iQiBhiU8k$Mo;gYOa|hL$ZR-gxU7kt{ci7U;M$sKYrM)^9o)!=X0xd-zmCkHPc$5 zM(c}X$fn`~8NSFx_aX|>C;#RQvJA!@^l4{ z=3)RlG%N{v*~;{vVFU%K-e*&1nUMBZ4I$J!9lf?fhpk~h>6&~3*;nGs)o*lu%nG`> z>k&WrS-9z*9zy4bdSbUh5w1NdhVPt9u*qQLS>nu$WjRTO59BBG7t+C;e5_753%~WL zbh$hd<%i>O`s@NE?{49LtItx$@R?+*evoo5d(!c>v6$~?!H&zflns6CXg*L+UV4yyc(yd;c7jF^kUCQO&RLU-m2r<8ZKSTmU6!(}(#T5dy|pZ?+N z735J7Katfx?Z<$7>3p-c2o2vqgC1{QDQGcmIF+qKyMISx-TcqwRjJ9Jl={(#U&d@< ze=sd|jb)(+%*b@(eE2k-!&v*1RDEg?8FuVu(Oq`%nmL*dE+0VgnxnBoe+NBsj-wgF zO0h>tob`#FqL+cEskgfhGbXQNp4Iy(`OkW`tT+g3M8`14X#-KZOogrV2&1n$%`|zA z9ZDy8u?dfaJ-8#C+w3?&t`REKHtZHOJX2{Ee}?Q4$)wj71?5F~yfv_vx;CV9W3{1_ zP`ZTn)&!8!2VWNH`Hj3BuT$*CCuI59lk%dIXqJ5%{mvPUf_HEDbp7Gfbt0S^>&uY! 
zOddYhKEvEyiOuX8fzfM)JuFjjhvYkJZ?8l!`J#g}Ar#l04 z(EEKZ6x+ja(eonK6ih*0-w8nvxkBZhPpMbAfw|tlt*HIt2XBcF&I9#&yaO6SFB*Wsu1F*kv^KeBCLBGswrA>Py*tVoxy1MEcD+$j+iqI))y{Unp z|3)BLK*uY!ALSaeFH*;rDg5Bw1o9}?$6nhDn3ToIGg}Jum*v}H;vTs~$*6Un*(&+^uj>$%o0EkGM9WpG+^T zhqvMlGGATI%q_x&9DNnLy5R|WY=5yV-yOJ^S4anM?IufZgOHyqFuCU>jwQPYbLA*r zTk1(sA9q7jtcxxlkD)-b5{%gK4pwg&{{B_vI}*IHOD%|%TE4>RXSI-9_Xi_-LfEQ} z8R%4LA|1ypc&#|ZP0p$zai0aLT~&ret_9Ay$djwk&s+K-o}OkgJ~Vj)rLhSJJllwg z@$Z>SyEVOMJMgTSk?nvJOkv(EYWtFi%l$RTTP;cs#+~#oB^p2Feeuz21Ap*DgtjTl z;%SmE95y&+#0sr4`9J)~i6p-gfmmdxs396i%Aq7y?`lbP66_V>vul6E&oFe@VQ{d&xD z#3}r%D`X>=XVbcr^?ZTTEU3s9aJd&FNvg4u{}!@5PD1BD{)r-DyZoUvcO{;#JHTfx z?j!A}E*`0@gN@@gX^Mp`a^{ZW_l(w(ctQ?y4Dq8x4-fK-UxAW^$GOVz;fQ}QfUj9T zlV)GYN#409|NRB^s zw7g{jPc{f$%4e+LfZ)4sDHooPDzxgMEVGmAKw5G>DR>>HXG;b_Q7ek##D%`}x)$_> zRz@7^^Rpr}ABL%z5Zn^4xHeMf@2-KNGX~W=}sV3O~cAYpoUZ%h@zj za}^Tr$CH(iC%Q6Rf(=^!1*JTi%9cJBRX*h=2HmrcjlEo;hZx;L%OI#7y0iNFP z_!}j09K3s&Y^(BVv#J#dq!iSRdd;sGxS_tm6iOeeDWyJ!PbwTm7jndz*d|-N&)z^O zuNv{>hz1?7`+(yG&HUKT{qV{B#?{`BhR*0rH0&s#(Vr*t|0J@ho*!YM)3#E~>M~UR z_(Jom4)J#Fy>z+A27e@eK{i$&VzFXmdh7;vcRnMzch#gX^sXWVt-5K-TC7od!#?Rt zQcZ23upSjYfRQcRpo%~s!W|8gnfhSUjj zo*F&87>O3CTiAL+11=Kd*uRoA`mc5%H10(p;Oqr%d0-w)cI#n;)(jduBM_FCdZ;GN zh^jIMG=JJ~Z?-gf2{E(u&e_4aGzHKTUcA_GZr_dGr@ zAp{C~nLO>tW>mF|piS9g^rm<*hKVd9@148w+hr&<2advp@sDZZ9eMU~#UBKWp3OtG z6meyEBlAm|iXOcc^diuc!dyeBws#`FpEya2&pQiRcOkv=xq_*RyLpMB8BN;zmK%?| zfV)BmblH=?$fCdq5Hd~(-P>hx%(S@NQ&_gcdSE*l1= zt@mk&k2=txpxyS_u{K2K&V zb03gj$0y4BJ`Q)bn6oy;)%X`8MJ>I1NOxNf{U<(*b}VUQfjf0bZ+a?Sb^S>LRP&)e zES!pt-9l)eD>+!#p*H%d@Ok>eG=&&?F zse=@|ULlQN^+DXpZ!M|>zege`hfb}|;v3J5z}tK2w7pda@>9Bm_nIe?f=pTIRWX`A z<|uddZlEW2VK`iP36W`E*=?t3bo>20j=IB?{LY^*Y#&UYo#&x{R4)yj8q7i!4pN!A z4jbukn=;>oVoRJdV*0H}>69H!Emh(-j<3bo;4Z9+P^4`Na@<$Y6%2&z+4iJr%uX>t z^yPn)FFBIlUPwcxd@r*fE=6h2eW*s@;B^OFXPZS_=|-mm`h`yEtJO1EpQ#ZRcAK%S zTY5?F$O87ms+vaVI3nF*HAxbYKyQZCpc_ zo^OG2bp>ndnMV`lzd&@K5q$}??HWy$)Pm(aI2!MXO)h@NqY7Wj`R)0_}A`-dXC!Vz723MupA3##&- zLlfMVAh|A=jqn>o)zkife{8_*v|>73IEVI@?||Rcbr7xaXLB-zzUs0NcI4e$$*QG)8X`zHen0sqx1O#2Sm>lI3eUTW}Z`gICZ`)5Ey&Y7L$*PvH+QrO}AgCwwAG zY4LI^cG1`wB{PMt%FcQ$ktwIfH=7~bE=OXm`Unua!Q>RO;rH$dQ%NZ#iAyu-qINOH z%gNEcv}Wpe+YG%%e=P7lD)^g{G<>Qj-zag36w9|l{^@H>-?oJhZug?;E?;+ktrNEW5a*iv1U?Phg4t06=;_|Q2p{&AhF=tNNc{qxsc$c~ zosLBA@Mkn$FP&=RhVu6ZR8Z0cHh-}tZ0T_`Sv^QQU zeGzHe+UQZp8o7h;5DjNK87>;#m z(=fqL?7e*-)%Ok2H1Y(o{~qCJrYaUK+{Pp_YVa~!i)~nO7`ca!)AbQ!5p_osV)aSb z{iYi$R=3i*AIF%^;#M;1f5T?nIfKd1Jt;EkA6ahM!Db1(xo$y6w2f~%?lHFyXaM2xB-#%c;S8wu=>gHksD|_0;dKZ77~Exq_G?q1XIRV4gYq(~JA=q**kO)=XF7^3EqQ%&k~p z;yBD1h@nJhxpCGA($IM#wFKM<&vw5T1Xtw-4e5(qk0rus*=4Vk&Q-e`P#*r1g^TE-UP}(53W<=VUp& zi=-zuurkUOUlL`*cY)L4PULOvs#uL&T7N^oANaD_YgR%TI1&nXY6ds zWRvoqqNvvhdnEHI#OMj4MPlHy(1jMe2B2!56dN4Af!2m};fSUSWsJK?*VSanV_rJ_ zr}r9@m>bibX+(J<$ta2zbh02NUg)O;J-?yMKIt81KL5b3cc!98&@;Rr5e>3=!S@H{ zP~i7oL?_q6QgJjt{^J3Oiar#YT3={_k{HpEg=kz}$(2H^Ve+<|xhhy-*r12#ysC%$ z8M9eVR1|g=9^-*olSpLcU;bKQBt0A_%U~D{_h4UoZu5tZhArYsMuX{$cO4}e2hc+y z6XL653%y~=Eb{s}njLkYBm~-x_}ocMcYz~3idM6Dt8O|zJ%U{k>!;@x>fG&s2{qi@ zMA<|7Xi~N=&B&Zc-cIK5eRvG{SBm*t4=u_Ictd{@HAv&84W5cxK}+Q#KXmpv7PPpN z#;c9^N6Ado@;041GK<;n>7jK6nJmh>m}sagO|;9V4^0~E*RM8&WotryOevPt*RU3I zVNah^FLc=Npt%39Fkc8-$(Dmyekq?0g$ClNs{?-Axr8oP3xvM;f@2ZisUR$dnQ0xz z`iyiGnT*AI$(xWI{!n0@R3ZFREU9&06+VNjkaCJ=SS;kbD(9oKTLgV0qA77hEzZ3R z;r%V+X?8+CGPUlYeXfv=IC@aXJ#{g$r$w~TVI@_zC)0zHNzA9Fi)5G8@p;o*utp!4QXkJG(U0HmL{Eh#j9sd#K4AM{O`5l0`FlG z?+}jEUse(0|LXowo!23P>R`d2$YBD$!rtm&MF$6&uv@z|;bXTD-+l7v{=OrK@N1>p+73+hX+F}^{Fut-eKf{#Fh3Pz zhu!l8Pu0cm)u9n&x2Oz~`oGY6 z(4X1Jtfm*G8hp*YBXslKQFd~aEtM)C=LJ{ak%!|+#4b$6EA?euDpHdoRo3zw=06~D 
z*aBaQh3}%nXzccthnIRYk{c87N^Tf=P1}XCst|fydja0Tw*1NKcUbxH5BukR3oWl* z(Qu#_c|Pj}cJWYZpPKE(Kw1%QNw8LydgAeg1hZ{X zpJ}{}3jY>95|VmJ^nTX^`gTf|dtAz(PYV_zD7AphzpiAT5<{rMz@61ztRmF?G5CG!#fcv!fA=4AWekj!M7E*8S1=Z?is z@oR9`jwGLx?d;qZU(B;uMKA7*g6hEKF!5Z6hw4k1jf4hyP~fl|@=e>K;r}wYAvRXKQJHR5=ZH5rgs3#c(j&jGF8zP!BPNd+Y)3 zVUa}^Pge5ZI-VH%<^~hJZAU-G4ds_#yr=&fL&3aSb)ndk;>H4$3k8lr88^#aj`H45Oj1{r zl3prN_zo+2Gx8OVn7ET;iYvyL`Qyrs+brnhKtW?Q=QR?;VgF+aOFZ3#pkRH>MJ4pZ zzwvYPg`DNIZhBDAC-i4E$F^~nE*+YH8Dm{A=h*_f8@&bvA??(5>mIK?RsiEU=ED7hM!NT} zpVS+N;Z^v3w!eEUGMe}B^{V%9=(Y?LZ^U43*gq;UnMBfC)#-pP(V-M8F4LS(4@}$X zoO3E3n2%<%w_Na3^eeG|R2*(4JnQTv-Mc;1A@KyCyc7BTKr4J&|C0O_n^7++Wc1Fo z!@s4Luk5xGXbT(8K#e1rBf!XF~ov zTEU3Td-N2R-o)E7V`yB;S9%tDlgjOg2PD|gZT*!L)|N}9!r{c4W0e@NYbYiR^B|1` zM*qh2LosaXQFV_s4L|&aRBsJp)+t*tc;aE+ zazhVI(Xtq&mPieo^|*IeKUS}_W^3L>VO0v};nls=q8P^=W^JaYit4PYTbr^oZ{v@# zCtXaKNOd0^&}FlN_ix#ZH?=n4NAm^ljs|p|Uq^|p2JP}nL|U9H+voHJzUR8x_7Vwt zxT%r+2XCN+$u>N&@ zxUi?e1L?;J6Vezw9q!d>T=(Ta(tmA8_6`I!m1nfuUm9)4=CERwsmN6y#|~CnVfMWJ zbTweBz%Mu9OS{|AGtY&aD0O0hwb1L!w#2c85hUw24pW^+v8mE=w4~9OtDkhDh>U3F zJ28%K4E1M2&yS)_Tbd9&!H*0S#97Pj`6L5-B+Yn;g~#e>WKI`h^GKL$hEkYzG;{iS z8PZJ+Tz;(=c?_n zaox-!%4Fak6VDo4^60IaIuBia40+6iDCsmDCbZ(^>1bTIL}ax;6NAS~adD4dG%Y^{ zM_Z!FN8bPo-xQ$j>tsGb`vS%e>tGqW_i6IMaC|=>0Ncm5+?$KQI>&+gJ6Pkg=Li}T z{hNN*i;$W94qExZm<`+X1xecsSh-h_;Jbnw%NgTWP7L=yu1iryLMCE*5|Rpptis|@ zbnf~~`l`?9rhxW1F#kKHN#2E6MiC|Kcni_kzYUI27kqO*uQ!dJltfM)q|Ck z-6M^G-}D84^$e@O!s+{-Lo8_HU7RrA&;MFv)25v3tgFTnF1pDmKQIE~2aZu$@H^Pn zo`h=Xjz8Pa#qA6*U(|+YpBaqOEPFD%;7-*_YJ8koHTed5 zk&)3-j6AUd2lwQmYqUB)@O?T}oiJWR||bc zldtqidmk573#ZPgH0GLn9O;LKu>gZ0`fNFk^}7wF;)f@hzrYpEk~SiB18;1(Ivu~a zw$jDtLxhtYZ|OCczM5tKOosR*7uh z)~gs`a*8|0+tL1)L0n_+Hyp57MjyIfQQYO1RJZL3-L|mh)+Jgvn!Aggu#cp}M;gds zR1D4UUqZ*72huZ_SehvH8*>GZ=UMSan*aDS&#?Q0C5sJE@KxYKeB8`D+q)@3rid0y z%cbsFvizz+4&r>{*|p>YRO?mBuk16yl)8yf7+{3~C%Z_r?KO#H#xmL0J-DQr$~z)o zl40;oGU(b#5t_?+zsd-bala&Z0;X^`vg6rS5tveDhp-t#AeDcH%fl0*r}DV^IYkop zR1Xl7E7g@WJIM{_V-)Gp?mSF&9E>yn>BFIE8JzW2kzCa` zq&i4Yu}&nV$UGt~Sv|Vt_>lUe8sT_PR(LLz;j4Ks-F8u8JM>FQ?B^$;zw3h&@w>^e zO7P2^e3?H)uzYY4`A9xS;7A#YH$6i&wt@yaKMMNXga?_QA$RA6a5GQB*8D1Fc61u) z!UoHS-KEO@NvtqE0u8gI`2M9WRP$#$KXPh44$Zzs_wwseE`6M8UkN?7HFx>3zDYE3 zQ85cWyO}PO$?*K-SCA-JNW5H))>+)Ajt8AMv+or9nw*R#hc|3d=sqeu{gG*qGR9B4 z#}*y=L02E;k>i(Y1fB0@U#{xXh1L`7rjT1VjN8a2Z5cxmV^jGtf$e2A)}JOv4Wemx z1F&#c6Kv9pakBb7auTF4cF7*9xloG%D*D)F8p-E|Il*#sv(N<;?l;|G$QBJ0@_0#1 z!7Chdgsx+cz#|!y{eds^I*ZWx7ns?kP}-hY_nO$$&$lDpXyv;bgs>`?lW1L-|%N z*EktY82y-#2oABOaTVj}1qO`ZZ`&_wY*<4s?Ine3~k- zhFc;Qusq{?boWFwUt}1Bq~}q%7JHwLG(YDlxnY#Fub;P0UP1;hm(axVie%$u&$#t3 z%*r&RXMW-+@KtA6dj!kO<;bXh08MW5XSZg=)3T68A&sLzGvWqO{K6HOn!A_$Kg2+; zw3^Mx5@w=vk=SuyG%ed+f-dKKf)09$&)(oeL(A&vzWYDy*gu+wtecB4;raY#sx%Ht zl*2#zGI>eKu#0Asu=D30YRTy%1NWP}^0FU&bW6oc(^iaY{K6|7*TB?Z2p!DZOqVAl zLo#_gw8x0E%_X-GwD=-R3n?Yn^h?B2O$A0qm&Z zdzu_Om>o=)#&-ju_kS;jQZkQXZoy!I?HNl}>)jHNM&gZ8*K@XMXgsk1Z`&&Ic6 z@!%jjAF+%!AGafunBmmF-IwJq2n17dL-@T43_TmjcBWXtUtk1=w7X%I>TgPEXeUVz zG1?Hl60h6ZDgC@GRAb$+y`q3xo1OT0LFdVqKMHqQ0}L>{McbXt$@%*bvR3$x56NrU z{AYs^`ZW}?eRZ_h_7d;;U5RwJryNNh^kOKdBg4IE^R+`ze!UOZ41n|iQDno) z#z23nJNxc>6=sJ#a3Xs-Sv20{TXT-nqTeIYdQ^gXmIm;*{#96L_m)*34#UY@bIchp zPTgMNtm5h_oVyi+yNx19J?Fui?;Rt*XN7pXZ4nIG7IJ&JEaa<-v)7}94!6Zd;qKIa zvfU}{CBfxz7`c{Q=%nNxqa8ZCG4Me@WmNuzVwN>O{i+8! 
zi#6!q@LiObUB)6;c+M{u|tOl7{>$S~B)DJ`5a!*bXlgRhYx~!!f-3BAmR6>4|#^ z9txbsz3sis-9VZKw9RH!y4z{%%&lxhQydK$bdbVb<54X#m<~21BWLq?)*P@7Wg|Ya zfA&qJ6c^1L!-V(T3&GQl%faBoVU%ci96OWaakDE6Rk$=UBw9r79bCKytIav1A0 z5~WST?ce6hQ6!@!#kB&@VNT9lnCAY2Qq_H-@3)uiW5-kI=|Xz(XByYAKZ2@RN3gWw zApRbZ!MpvVQ7q`sZd=5K9CswsAF>&@zS;8eazbWNTF8bvo6@rfjjZI_YnrjB8)bJ) zC|zYCyC6@{8Fm7tq7is!+sO8e^Q3!QcVJPuz;z6o&jx%crdg{hC^l|2DrIJ~rt3j7Ir5HpXx|ZhT3i(>V|6T@h!~%jTllx{oat@_=Jb zchF@+bNG*HhP9Un-O+EN&rdA{&psKqI^WZk{15Dl{$*(Gt7SgJ>ya}{&=bPXBI3hZ zYO5%xnUZO&f6Zaq+w+t=ypqM*nu`>#Ad26rLA2@JaU|6$@v5kWSYmOSy%b^86ta;8 zne4%*vkc|}XXHjgIqr^CL!C$^Ym6zxI-yHA!y+6j0xY7))S?}45E&Gu!VM{hO+u^&MxS8Hp_+J%x9vS`Q zFfEGrJyJtjjywNoln?0>fh=CQv*vqkCKJm(LkZT-bZK}JPD)IszQ;H5JY^QMoUA}e zXGSC7??@V3Zzgb_PLQPZQx^YXwQw6tm4@yKr~NXyymXor26|3_jKXnLe)Z;>>I>nY z>52>QY{}Am1aF=>h|-P)@%6%;sRO!8`16S>nEhxE`=gnRBPU(>>q;+b9&&+a#La|< zN-v!_7>a|-N}=#YoM?O}J|C^dui0;SkH~Fu`)ka~vQ4O?U!*$w|)`-7rpW;)xJ-dD<5uICe*ze+>ba=^g z#uZdaMRphb^f^8He3u7=3pWbZ&A=Cr+f>~gE8IyMf*(gu(~_tunA97Nig0hZdQPGZ zNm;b^brV~;$6JjLc# z#8ZsOE?%4Z7xNPg1)UyfS)sy4OI5-Cu@1NIpF&DcRd|d9wKUh1!6*? zxLwN%+EK5;?}rI_j?#&Im4*XS+x8LPTR_ISW0?O0d2~!1!q>g6!4QiWzGj>cE}W0! zOJqNwUQCyThdNXFy%^?`V@-#ok6^dJ=N%Jl%5y~*)7z8`>WwPKcBKera`gpj2K8~( z4HJ;6S_$Mn89lXl8LR_rksDyAlESEU%8sWfop_?qnM`%3;wG5AGc|lai*5!#p!i+bVW3m%R>x z&*g|o&4cO2gWY^&?Q`r+G$qedzsS084okW95Q?fNc*MOl%#Qp?gIDgS8Rts4rGGWC zo!@AXN*_fGGp9*HALw(tAJT{1Lw>^{xCrL86m)Ry(c#n`smjVW&!xca zr}?=5R#V)+O>D@?2+UQSL{kM{a>!#}I#ck9Vn=)Pp2Zrt`OlrHUaz1eCrN>!@{9s= z{<461`*2G8KFz=DPhwq1>4lI73{2?YrxWr}FI9lOGbUrw)G{uqy_QO)6*1G^k^G$R zFpcC!q456J)DQv&KCpm44qLlx{ zXv&LRsu_`uV5Meg=6)o{ndk9q_;f0$yv_!XAwcgm4^s)oMmcZ3Psxk=)jPR=$bOXm zN#|*TUhwRC7ab1Qr=JH3nfp}`jgDlBmHrqq?EszVvp{&sOA@tng?M*1g|zIWbQZuR z9!8_3wVhQPdE&441>RZr9xv9_(~!Ics!&TD@N8;;)O4Uv0s zr}q~v_F0dA&Xt_|D^S=j5$MXkp^#A%Nau?KT|2RihEycc|H`}e@TjV5zh{z=2T2$( ziYRK(K!lR8&-={loM6Huyc{7suF?*Z$w@LYFJ~q|5EQFOE1=;M3szdAaxXq;8}NY? zD7gsbVS0@k#0R!P5onDq_JX1xopXO@CSx9G?X7?GkM8eV+2{A$>#V)bT6^#F$jqEQ zRb2eBFDH|uRUhjmzwab}UjLB(>=;3xv893hn%t-_-&UtjIB^r+l{U{bb?X4~Ov#5P zOVLD~HD~Lq|N9E^(2mpbcPFriZFNDZQ|D$>t z{y$9SDe?bU-=y!Y=rny=ERfN!exk3tYoGr9n7`@6yc>w;Z+|dNi8A_tUrpBSEw7Sg zJH9ZTJn1F(UYF+9*Z#36K6=g`I=#-ir=Ut z+8B-C#p0m=UMSxG9pm4IjdzfL*Y}=(TOZ%mJ~5fo(lZ8RW@YCL%+1Rml%JEHl|3kL zfGL-l2WDoZ4a~|V8RkKGIr(V=GELd(nb~Qkyj;BTeLzM|IvJFeO)@gg={bY)O#^cW zWToYqbMfNMfmvzkd6^j?vvV^{gYxszb4ccZv}|+Mz`TK(`MIVve4BSrde#7Q&VWJr z*#px^R<1cO)0Cc*kz>wE&m3UN7&I_DKP@+pe4BqpjE(YLCdTejacqHCu*_Bcu>~ z47jNqx1(MI>>3As*j@qL3`~Fx{W8=q11}hx?61AlY&is6I40R21>XtWK02vy0e=yA zMK^9mePsvy>BeEy6u6}r`wzi&AF#t{AC<&dTP!Z%&XL{zg7*V2C`z`sf?ok_0XF<| zq22_%;(D~>cnNR|aM=i8L@o^e5b)W;q&@}T3EbR`J5iIPuzxM|5r8mo0k8!)VIMUQ z+;g9IH=%9?whT@B zTLydvxc%}ZCg3e^AzuZ+IJ{QyMZm5hNqY;x%fJLUkuTI$z%7H5_$=x$aG?eI_{?83 z@EYK=`M?Yeg5L<-3_Pp^@7qJ&V(h;cHv1L0e@U@k!X%0Re8V%(*fLTgn+fUIhu_y~ z!3N6w#NU7J&)C-yn|)@B3A^tu>*u3EqS$lIWKuakw1Fd-{HckQ#EmLo89b%rs@qxBGBa3BBWexrAOPxUPHPZ^SqTtO1C1!v6#=iY`ggi5wWY zNa__WUAnZU8iz-652W;C-lEF!iGD17xLXtb6!hk*l6qJ6;gWiV;FiH|X`lEH;PL976 z?&#Ofr+st#73pS+05R;Q_J6zA{!eF_ExRB_yHPfG&!;yKHxqD54gt48+96H(X7WeW zgt()j8ec=8)Enl+lEuCt$Cid0Yly3{GU$uByIaXc#5Ps(51Gjtj3Xo7sY1aKa7%h| zk;7(^1+JwRH~fg1l!0sS#dU#O0PgF`aKkRj4`6FD#3-K`V+qm*c?t3~ z+`^)^Hz8iw{MgIVP9 zyJvY8X;t+R%~M03!QRtC!8-TENGM=z5G}*)YIM&C1+`hRNP`#4R7-E2*}RSaAM%okW>@3Q)wgTjn7bUzbS+jNsv+BZ?>#5vt8v95#wzS;93QE3&klw? 
z-i1{SVR-ye%D{x8ly2SFtH)%jrl@B%86ur#hmf3 zN{p$5M!R{E7E3Bs25Up)hZ(bj^|);HYc&&=c(t&x*F>K`rooT$j9FUj8=JEYOXWuR zf-zSl|Gmj*C6SUP@~%qH}T_3#H}TAjf} zzULB8HHJTt2`u?y2yK8^d$!#g8IScxt9REXlI}JJ3G5rX${T++NI43>z3C zLxB<-#1rFQ%LK4 zlV#hmy%jS~mEo?w$F^ZFbuM;$YioV(YOJtY(73j_8IlPU9mQNt>1tjqL5ZWflyfIM?BQ{S z`)WUIyR{lt5E!dFywHu%;C#ABOAzSxYxSbvqL&I%$~U8rZWsF4i)vbpJEe@ik`G_1 z8)I5DrT5R~DV4jVv7Y0AJtOx^wMeLcKRupw`)%k3LpYK8(-C zudxwLR_Y;gjZ4PNmxa z?wIY9rJiw^+Q-P&H`(i*wf=pj2I3`ruZXAG`p=X8{rxNFPXB?`%V%5+7>hwqfcmz5 z^PEfG#3WYG)Ex~*AFgI`m!5|Ec)UW84C=N#e?0hWI6QN_M9<(1-EY!Q zwe@dLyF9s1YX4OKi5hEcqL!2XrusFZAJcFM?pclV$-l0c$d%|EvZAjKCSu$H(XZjN z;qzp)8TZrpxy4J0LtPT^7cSBwQQQhRi#P+V^Rb`~gs-n6@CCy2_YFbNI z(H-;+`WBtf)G$k#gN&CAu*=zX><-)OHqwsAkg!)`yUyOEV55_eLR6D!HC;x3LR;t& zdXmOz4!c@t7siN$nh2%}xrv*~RdWs8UEB%oUB@c*Pb$c=WZPoeM2i^*lfzGP?r;_> z#|*nJyt5P4T4}DlMSf0xQQj#Zk?*yxv}M{o_S5zw4m7r#63?I*Za%k?dx(39JAmg< zc)o#2Ig{DJoMJv>^4M$HaV*bHXJ@i*b}M_BeUm-G{x6%wP2{fQXY!+jae_@4FL;DH zAs|GBM&WMZeqpV!UU*D+Qg}va5ndKrg+oGz@V@YYa7Os6a8bw=hl^uHLHwyWMY>Zu zBE2u2l`cxT@-=d~JWqDZn!H%PNB%(mi+qLkTB~He!@9=0(YnRjV%=lyvYxYM+b*|_ zuoc@%Y*w3UtF}FCQ|#Zj*V&iYSK1%2x7c5_ziU5XkK3m@Tn?|}4#yhD2FEDpIH%;i z%emIM-g(6NrnA%evGbfWP03dZl&h3NWt1{jVHH_XlnP~v;!@@)^Ob6)UI{8OWvQ}R z*{bYRT9t#!=ZZxgrk1OfYFKSlf2=;P{z83GJ)pL!udDAFIkDh58{$_?%jq=QK<}ha z(&y-2dVqG(^E8jSj8T|L%sgfRvxr&F+{e7Y>}C!#?aVPIoy}#3v4w0gJC|L?u4I43 zzQ`VDKVrXNb#@dtj+?|y=az8yaF1}m~OG9jo35ol6}5ivp-~i#J<~p z!2XW?5B33$L5>?74#zEyxsD$?Ry&?@Jmctayzlt4<1@!)&a0dwoH6Gz=Z~FD&JE7} z&Nk;e$lYmYfiheftx!c$>XbFgBg$UofYPO$SF+VXn2)MDOZBVwsH@cN>Mr$7^&OQI z;#nK=l1}sJwR9A+GlkwlH`A?jCvBw%={b6lW-}IMD6->b>X~)S2Ie=IyYDe4m@IZM zdp$dbb+Q%gRCX2n6z0KE_C40j;oI5VNNyT8n+tGJ?&sVV?q#l(dy9LYJINLB!}-xX zhLYMFbeu^kll*B6W`{F8b zt@whtM|@p8CdS1~>1v6QRB4hlU-C+~OFN_v%$iI&Pv&K(e2Y9+zDr&uKPGRJ_sR$5 zw$irM_M+{u?KRtH zHhk#FKGr_oKFJ=j-)Vo?{-}Mw{de}?+fUik9664`jygw^<59;ej>C?#jxQW}&dZ!O zXSs8(GwfXHTtHCDcc5Ab*K_w!Hj&+^;&Oo0kk!7KQLCgD-x7sB(xyTXUU=YmBXDSl6!Bf7=K z;&Sn6ahrHpd`bE$_zFLs(;7v#`_ +tool for installing Python packages. + +* `Installation `_ +* `Documentation `_ +* `Changelog `_ +* `Github Page `_ +* `Issue Tracking `_ +* `User mailing list `_ +* `Dev mailing list `_ +* User IRC: #pypa on Freenode. +* Dev IRC: #pypa-dev on Freenode. + + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.python.org/pypi/pip + +.. image:: https://img.shields.io/travis/pypa/pip/master.svg + :target: http://travis-ci.org/pypa/pip + +.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg + :target: https://ci.appveyor.com/project/pypa/pip/history + +.. image:: https://readthedocs.org/projects/pip/badge/?version=stable + :target: https://pip.pypa.io/en/stable + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/INSTALLER b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/METADATA b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/METADATA new file mode 100644 index 0000000..600a905 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/METADATA @@ -0,0 +1,69 @@ +Metadata-Version: 2.0 +Name: pip +Version: 9.0.1 +Summary: The PyPA recommended tool for installing Python packages. 
+Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: python-virtualenv@groups.google.com +License: MIT +Keywords: easy_install distutils setuptools egg virtualenv +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=2.6,!=3.0.*,!=3.1.*,!=3.2.* +Provides-Extra: testing +Requires-Dist: mock; extra == 'testing' +Requires-Dist: pretend; extra == 'testing' +Requires-Dist: pytest; extra == 'testing' +Requires-Dist: scripttest (>=1.3); extra == 'testing' +Requires-Dist: virtualenv (>=1.10); extra == 'testing' + +pip +=== + +The `PyPA recommended +`_ +tool for installing Python packages. + +* `Installation `_ +* `Documentation `_ +* `Changelog `_ +* `Github Page `_ +* `Issue Tracking `_ +* `User mailing list `_ +* `Dev mailing list `_ +* User IRC: #pypa on Freenode. +* Dev IRC: #pypa-dev on Freenode. + + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.python.org/pypi/pip + +.. image:: https://img.shields.io/travis/pypa/pip/master.svg + :target: http://travis-ci.org/pypa/pip + +.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg + :target: https://ci.appveyor.com/project/pypa/pip/history + +.. image:: https://readthedocs.org/projects/pip/badge/?version=stable + :target: https://pip.pypa.io/en/stable + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/RECORD b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/RECORD new file mode 100644 index 0000000..39f5394 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/RECORD @@ -0,0 +1,501 @@ +pip/__init__.py,sha256=00QWSreEBjb8Y8sPs8HeqgLXSB-3UrONJxo4J5APxEc,11348 +pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584 +pip/basecommand.py,sha256=TTlmZesQ4Vuxcto2KqwZGmgmN5ioHEl_DeFev9ie_SA,11910 +pip/baseparser.py,sha256=AKMOeF3fTrRroiv0DmTQbdiLW0DQux2KqGC_dJJB9d0,10465 +pip/cmdoptions.py,sha256=8JCcF2kKAF2cFnV77oW-3DsHJifr9jF2WuChzzwgcwg,16474 +pip/download.py,sha256=rA0wbmqC2n9ejX481YJSidmKgQqQDjdaxkHkHlAN68k,32171 +pip/exceptions.py,sha256=BvqH-Jw3tP2b-2IJ2kjrQemOAPMqKrQMLRIZHZQpJXk,8121 +pip/index.py,sha256=L6UhtAEZc2qw7BqfQrkPQcw2gCgEw3GukLRSA95BNyI,39950 +pip/locations.py,sha256=9rJRlgonC6QC2zGDIn_7mXaoZ9_tF_IHM2BQhWVRgbo,5626 +pip/pep425tags.py,sha256=q3kec4f6NHszuGYIhGIbVvs896D06uJAnKFgJ_wce44,10980 +pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 +pip/wheel.py,sha256=QSWmGs2ui-n4UMWm0JUY6aMCcwNKungVzbWsxI9KlJQ,32010 +pip/_vendor/__init__.py,sha256=WaaSJ3roSSJ_Uv4yKAxlGohKEH9YUA3aIh1Xg2IjfgU,4670 +pip/_vendor/appdirs.py,sha256=-9UOIZy62ahCQVY9-b7Nn6_5_4Y6ooHnv72tM8iHi9Y,22368 +pip/_vendor/distro.py,sha256=A4Douw9pcqdYxDTp5b-OR02fxVXnfWs-wC1wA89rhRk,38349 +pip/_vendor/ipaddress.py,sha256=wimbqcE7rwwETlucn8A_4Qd_-NKXPOBcNxJHarUoXng,80176 +pip/_vendor/ordereddict.py,sha256=4KsFuc6V8IgHROCHUu-4vCrr21ZPPea7Z0cvX9AjQ7w,4094 +pip/_vendor/pyparsing.py,sha256=7vAuUVbh6txUKQR2IzJ8_9DKmD5vtm5MDssWkI0ka8o,224171 +pip/_vendor/re-vendor.py,sha256=PcdZ40d0ohMsdJmA4t0AeAWbPXi1tFsvAwA5KE5FGeY,773 +pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972 +pip/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +pip/_vendor/cachecontrol/__init__.py,sha256=UPyFlz0dIjxusu5ITig9UDFJdSY5LTwijhldn0AfyzU,302 +pip/_vendor/cachecontrol/_cmd.py,sha256=MPxZfZd2LKDzVrs55X3wA1rsI2YuP8evLZSwQj0dIk0,1320 +pip/_vendor/cachecontrol/adapter.py,sha256=RaGYyRA-RA1J0AnE67GzEYFPBu4YH4EQUvQqTKa57iM,4608 +pip/_vendor/cachecontrol/cache.py,sha256=xtl-V-pr9KSt9VvFDRCB9yrHPEvqvbk-5M1vAInZb5k,790 +pip/_vendor/cachecontrol/compat.py,sha256=uyovOpd1ehI3J1XeBqJvcsIp6fvkjBpoQmu_0J2st8c,416 +pip/_vendor/cachecontrol/controller.py,sha256=elDsLcaYA15ncodRmHnWQp6ekU_ocEGtDeGLbsnTjzo,13024 +pip/_vendor/cachecontrol/filewrapper.py,sha256=_K8cStmXqD33m15PfsQ8rlpo6FfXjVbKmjvLXyICRgI,2531 +pip/_vendor/cachecontrol/heuristics.py,sha256=WtJrVsyWjpP9WoUiDVdTZZRNBCz5ZVptaQpYnqofDQU,4141 +pip/_vendor/cachecontrol/serialize.py,sha256=XM6elG9DSNexwaOCgMjUtfrHHW5NAB6TSbIf3x235xs,6536 +pip/_vendor/cachecontrol/wrapper.py,sha256=Kqyu_3TW_54XDudha4-HF21vyEOAJ4ZnRXFysTiLmXA,498 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=uWnUtyMvHY_LULaL_4_IR1F_xPgK5zHfJyRnBq4DnPE,369 +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=FsDug3bwUAQ3okjjfGzxlDaBf2fwVSn1iBKMTL6SyGU,3532 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=XywqxkS9MkCaflTOY_wjrE02neKdywB9YwlOBbP7Ywc,973 +pip/_vendor/colorama/__init__.py,sha256=9xByrTvk9upkL5NGV5It2Eje4-kzNLwa_1lGPWpXoNU,240 +pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524 +pip/_vendor/colorama/ansitowin32.py,sha256=gJZB35Lbdjatykd2zrUUnokMzkvcFgscyn_tNxxMFHA,9668 +pip/_vendor/colorama/initialise.py,sha256=cHqVJtb82OG7HUCxvQ2joG7N_CoxbIKbI_fgryZkj20,1917 
+pip/_vendor/colorama/win32.py,sha256=_SCEoTK_GA2tU1nhbayKKac-v9Jn98lCPIFOeFMGCHQ,5365 +pip/_vendor/colorama/winterm.py,sha256=V7U7ojwG1q4n6PKripjEvW_htYQi5ueXSM3LUUoqqDY,6290 +pip/_vendor/distlib/__init__.py,sha256=-aUeNNCfiIG_1Tqf19BH0xLNuBKGX1I7lNhcLYgFUEA,581 +pip/_vendor/distlib/compat.py,sha256=FzKlP9dNUMH-j_1LCVnjgx6KgUbpnRjTjYkTkDYRPlI,40801 +pip/_vendor/distlib/database.py,sha256=jniJmYk0Mj2t6gZYbnn68TvQwnVZ0kXyeuf_3AxFclk,49672 +pip/_vendor/distlib/index.py,sha256=Cw8gxFq_7xXvdgExL3efjLAY3EAPDMSL3VA42RkbQBs,21085 +pip/_vendor/distlib/locators.py,sha256=hD_Hm3aSL9DklY9Cxyct2n_74gZ0xNFFGB5L7M6ds14,51013 +pip/_vendor/distlib/manifest.py,sha256=3qEuZhHlDbvyYZ1BZbdapDAivgMgUwWpZ00cmXqcn18,14810 +pip/_vendor/distlib/markers.py,sha256=iRrVWwpyVwjkKJSX8NEQ92_MRMwpROcfNGKCD-Ch1QM,6282 +pip/_vendor/distlib/metadata.py,sha256=hUsf7Qh2Ae4CCkL33qK8ppwC8ZTzT7ep6Hj9RKpijKU,38833 +pip/_vendor/distlib/resources.py,sha256=VFBVbFqLVqDBSQDXcFQHrX1KEcuoDxTK699Ydi_beyc,10766 +pip/_vendor/distlib/scripts.py,sha256=xpehNfISGPTNxQZu02K9Rw2QbNx_2Q4emePv3W5X0iw,15224 +pip/_vendor/distlib/t32.exe,sha256=cp0UAUDDr1tGAx8adlKxWbCHIa-oB3bxev5zYzgAr8E,89088 +pip/_vendor/distlib/t64.exe,sha256=FiljDPcX9qvoe9FYE_9pNEHqbqMnhcCOuI_oLJ4F9F8,97792 +pip/_vendor/distlib/util.py,sha256=E2wU-RZShPMFUMJr9kPmemTULinM4qDzosNPihCuKE0,52991 +pip/_vendor/distlib/version.py,sha256=CgghOUylxGD7dEA2S3MvWjx7mY_2bWsluF0Of3Yxl4Y,23711 +pip/_vendor/distlib/w32.exe,sha256=LItrBJesEqt2QTQuB-yha2YbMegURHmHmdSxhjBqmnc,85504 +pip/_vendor/distlib/w64.exe,sha256=n_PioBC7ltz7sAk1WLbLzZJgS4R2axSy_0HPf8ZCsEg,94208 +pip/_vendor/distlib/wheel.py,sha256=UP53cKxOM5r7bHSS-n5prF6hwJEVsMW9ZNJutOuC26c,39115 +pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 +pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 +pip/_vendor/distlib/_backport/shutil.py,sha256=VW1t3uYqUjWZH7jV-6QiimLhnldoV5uIpH4EuiT1jfw,25647 +pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 +pip/_vendor/distlib/_backport/sysconfig.py,sha256=eSEyJg7jxF_eHlHG8IOtl93kb07UoMIRp1wYsPeGi9k,26955 +pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 +pip/_vendor/html5lib/__init__.py,sha256=JsIwmFldk-9raBadPSTS74JrfmJvozc-3aekMi7Hr9s,780 +pip/_vendor/html5lib/_ihatexml.py,sha256=tzXygYmisUmiEUt2v7E1Ab50AKQsrD-SglPRnY75vME,16705 +pip/_vendor/html5lib/_inputstream.py,sha256=C4lX5gUBwebOWy41hYP2ZBpkPVNvxk_hZBm3OVyPZM4,32532 +pip/_vendor/html5lib/_tokenizer.py,sha256=YAaOEBD6qc5ISq9Xt9Nif1OFgcybTTfMdwqBkZhpAq4,76580 +pip/_vendor/html5lib/_utils.py,sha256=bS6THVlL8ZyTcI6CIxiM6xxuHsE8i1j5Ogd3Ha1G84U,4096 +pip/_vendor/html5lib/constants.py,sha256=Dfc1Fv3_9frktgWjg4tbj-CjMMp02Ko9qMe4il1BVdo,83387 +pip/_vendor/html5lib/html5parser.py,sha256=Dmlu9hlq5w_id6mBZyY_sE5LukIACgvG4kpgIsded8Q,117170 +pip/_vendor/html5lib/serializer.py,sha256=Urrsa0cPPLqNX-UbJWS2gUhs_06qVbNxZvUnrmGZK6E,14177 +pip/_vendor/html5lib/_trie/__init__.py,sha256=8VR1bcgD2OpeS2XExpu5yBhP_Q1K-lwKbBKICBPf1kU,289 +pip/_vendor/html5lib/_trie/_base.py,sha256=6P_AcIoGjtwB2qAlhV8H4VP-ztQxoXFGwt4NyMqG_Kw,979 +pip/_vendor/html5lib/_trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178 +pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 +pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=DXv-P2vdQ5F3OTWM6QZ6KhyDlAWm90pbfrD1Bk9D_l0,621 +pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 +pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=2Q_JnMscn_tNbV_qpgYN_5M3PnBGfmuvECMKDExHUcY,2742 +pip/_vendor/html5lib/filters/lint.py,sha256=qf5cLrT6xXd8V7GH1R_3lKxIjuJSfpbWTpSwaglYdDw,3365 +pip/_vendor/html5lib/filters/optionaltags.py,sha256=EHig4kM-QiLjuxVJ3FAAFNy-10k4aV6HJbQzHKZ_3u8,10534 +pip/_vendor/html5lib/filters/sanitizer.py,sha256=7PqJrhm6mo3JvaHk2IQW7i74Or7Qtd-FV8UftJIyDys,25112 +pip/_vendor/html5lib/filters/whitespace.py,sha256=KPt067nYTqqi8KLTClyynn4eVzNDC_-MApXNVHRXVX0,1139 +pip/_vendor/html5lib/treeadapters/__init__.py,sha256=l3LcqMSEyoh99Jh_eWjGexHnIvKhLAXoP-LDz88whuM,208 +pip/_vendor/html5lib/treeadapters/genshi.py,sha256=6VIuHDNoExv1JWv3ePj6V5CM-tcyiUSWe5_Hd2ejbwY,1555 +pip/_vendor/html5lib/treeadapters/sax.py,sha256=3of4vvaUYIAic7pngebwJV24hpOS7Zg9ggJa_WQegy4,1661 +pip/_vendor/html5lib/treebuilders/__init__.py,sha256=UlB4orkTgZhFIKQdXrtiWn9cpKSsuhnOQOIHeD0Fv4k,3406 +pip/_vendor/html5lib/treebuilders/base.py,sha256=4vdjm_Z2f_GTQBwKnWlrzVcctTb-K5sfN8pXDaWODiA,13942 +pip/_vendor/html5lib/treebuilders/dom.py,sha256=SY3MsijXyzdNPc8aK5IQsupBoM8J67y56DgNtGvsb9g,8835 +pip/_vendor/html5lib/treebuilders/etree.py,sha256=aqIBOGj_dFYqBURIcTegGNBhAIJOw5iFDHb4jrkYH-8,12764 +pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=CEgwHMIQZvIDFAqct4kqPkVtyKIm9efHFq_VeExEPCA,14161 +pip/_vendor/html5lib/treewalkers/__init__.py,sha256=CFpUOCfLuhAgVJ8NYk9wviCu1khYnv7XRStvyzU1Fws,5544 +pip/_vendor/html5lib/treewalkers/base.py,sha256=ei-2cFbNFd0gRjyaFmxnxZGLNID4o0bHFCH9bMyZ5Bk,4939 +pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 +pip/_vendor/html5lib/treewalkers/etree.py,sha256=8jVLEY2FjgN4RFugwhAh44l9ScVYoDStQFCnlPwvafI,4684 +pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=sY6wfRshWTllu6n48TPWpKsQRPp-0CQrT0hj_AdzHSU,6309 +pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 +pip/_vendor/lockfile/__init__.py,sha256=Tqpz90DwKYfhPsfzVOJl84TL87pdFE5ePNHdXAxs4Tk,9371 +pip/_vendor/lockfile/linklockfile.py,sha256=C7OH3H4GdK68u4FQgp8fkP2kO4fyUTSyj3X6blgfobc,2652 +pip/_vendor/lockfile/mkdirlockfile.py,sha256=e3qgIL-etZMLsS-3ft19iW_8IQ360HNkGOqE3yBKsUw,3096 +pip/_vendor/lockfile/pidlockfile.py,sha256=ukH9uk6NFuxyVmG5QiWw4iKq3fT7MjqUguX95avYPIY,6090 +pip/_vendor/lockfile/sqlitelockfile.py,sha256=o2TMkMRY0iwn-iL1XMRRIFStMUkS4i3ajceeYNntKFg,5506 +pip/_vendor/lockfile/symlinklockfile.py,sha256=ABwXXmvTHvCl5viPblShL3PG-gGsLiT1roAMfDRwhi8,2616 +pip/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720 +pip/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 +pip/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 +pip/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 +pip/_vendor/packaging/markers.py,sha256=mtg2nphJE1oQO39g1DgsdPsMO-guBBClpR-AEYFrbMg,8230 +pip/_vendor/packaging/requirements.py,sha256=SD7dVJGjdPUqtoHb47qwK6wWJTQd-ZXWjxpJg83UcBA,4327 +pip/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 +pip/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 +pip/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 
+pip/_vendor/pkg_resources/__init__.py,sha256=CcwuHtCBZn9OTkmgF9cFpadIAMhlrnZTVKTOo4V2p58,103230 +pip/_vendor/progress/__init__.py,sha256=Wn1074LUDZovd4zfoVYojnPBgOc6ctHbQX7rp_p8lRA,3023 +pip/_vendor/progress/bar.py,sha256=YNPJeRrwYVKFO2nyaEwsQjYByamMWTgJMvQO1NpD-AY,2685 +pip/_vendor/progress/counter.py,sha256=kEqA8jWEdwrc6P_9VaRx7bjOHwk9gxl-Q9oVbQ08v5c,1502 +pip/_vendor/progress/helpers.py,sha256=FehfwZTv-5cCfsbcMlvlUkm3xZ0cRhsev6XVpmeTF4c,2854 +pip/_vendor/progress/spinner.py,sha256=iCVtUQbaJUFHTjn1ZLPQLPYeao4lC9aXAa_HxIeUK6k,1314 +pip/_vendor/requests/__init__.py,sha256=Cde-qxOWcslaEcPvKAJQPFbY8_va8PMbU7Rssr7vViI,2326 +pip/_vendor/requests/adapters.py,sha256=DJdgax91PyS2s6_oZPELbuLWNlM2xGguNu62sqcOUik,19740 +pip/_vendor/requests/api.py,sha256=PgminOpD8hLLKLNs0RWLKr1HpNc4Qxr_6uen8q2c9CI,5794 +pip/_vendor/requests/auth.py,sha256=eBLtJlcTZxRG7xKXCvGQBLO9a-PxFgMf2qTUbtZwMJM,8175 +pip/_vendor/requests/cacert.pem,sha256=5xzWFRrSP0ZsXiW6emg8UQ_w497lT4qWCv32OO8R1ME,344712 +pip/_vendor/requests/certs.py,sha256=Aa-oStu9f2lVi8VM9Aw1xaAtTIz7bhu5CGKNPEW1waM,625 +pip/_vendor/requests/compat.py,sha256=0cgWB43LEX5OrX1O4k-bPbFlIbWXgEd412DSDJtF1Y8,1687 +pip/_vendor/requests/cookies.py,sha256=awMI0hm3SKheMEDTqO8AIadc2XmnCGKPCTNw_4hlM3Q,18208 +pip/_vendor/requests/exceptions.py,sha256=x-MGvDASYKSstuCNYTA5IT_EAcxTp5knE3WPMrgkrlI,2860 +pip/_vendor/requests/hooks.py,sha256=HXAHoC1FNTFRZX6-lNdvPM7Tst4kvGwYTN-AOKRxoRU,767 +pip/_vendor/requests/models.py,sha256=YHuL2khGDFxeWc-NMJIcfFqvYJ0dKs1mXfj1Fuff1J8,30532 +pip/_vendor/requests/sessions.py,sha256=H7HpKRLKeu1MSH5W1-PI2GMCFLN4bz5i3OFqjjgzE5k,25609 +pip/_vendor/requests/status_codes.py,sha256=uwVHcMPkHV3FElDLlnDTH3KULZIAGxaovbBxrjWm8N0,3316 +pip/_vendor/requests/structures.py,sha256=yexCvWbX40M6E8mLQOpAGZZ-ZoAnyaT2dni-Bp-b42g,3012 +pip/_vendor/requests/utils.py,sha256=9d3jqnA8avsF9N1QPmsk2pJgo2pxuExrN2hoIhtLggY,24163 +pip/_vendor/requests/packages/__init__.py,sha256=CVheqNRcXIkAi5037RhxeqbAqd0QhrK1o9R9kS2xvuI,1384 +pip/_vendor/requests/packages/chardet/__init__.py,sha256=XuTKCYOR7JwsoHxqZTYH86LVyMDbDI3s1s0W_qoGEBM,1295 +pip/_vendor/requests/packages/chardet/big5freq.py,sha256=D8oTdz-GM7Jg8TsaWJDm65vM_OLHC3xub6qUJ3rOgsQ,82594 +pip/_vendor/requests/packages/chardet/big5prober.py,sha256=XX96C--6WKYW36mL-z7pJSAtc169Z8ZImByCP4pEN9A,1684 +pip/_vendor/requests/packages/chardet/chardetect.py,sha256=f4299UZG6uWd3i3r_N0OdrFj2sA9JFI54PAmDLAFmWA,2504 +pip/_vendor/requests/packages/chardet/chardistribution.py,sha256=cUARQFr1oTLXeJCDQrDRkUP778AvSMzhSCnG8VLCV58,9226 +pip/_vendor/requests/packages/chardet/charsetgroupprober.py,sha256=0lKk7VE516fgMw119tNefFqLOxKfIE9WfdkpIT69OKU,3791 +pip/_vendor/requests/packages/chardet/charsetprober.py,sha256=Z48o2KiOj23FNqYH8FqzhH5m1qdm3rI8DcTm2Yqtklg,1902 +pip/_vendor/requests/packages/chardet/codingstatemachine.py,sha256=E85rYhHVMw9xDEJVgiQhp0OnLGr6i2r8_7QOWMKTH08,2318 +pip/_vendor/requests/packages/chardet/compat.py,sha256=5mm6yrHwef1JEG5OxkPJlSq5lkjLVpEGh3iPgFBkpkM,1157 +pip/_vendor/requests/packages/chardet/constants.py,sha256=-UnY8U7EP7z9fTyd09yq35BEkSFEAUAiv9ohd1DW1s4,1335 +pip/_vendor/requests/packages/chardet/cp949prober.py,sha256=FMvdLyB7fejPXRsTbca7LK1P3RUvvssmjUNyaEfz8zY,1782 +pip/_vendor/requests/packages/chardet/escprober.py,sha256=q5TcQKeVq31WxrW7Sv8yjpZkjEoaHO8S92EJZ9hodys,3187 +pip/_vendor/requests/packages/chardet/escsm.py,sha256=7iljEKN8lXTh8JFXPUSwlibMno6R6ksq4evLxbkzfro,7839 +pip/_vendor/requests/packages/chardet/eucjpprober.py,sha256=5IpfSEjAb7h3hcGMd6dkU80O900C2N6xku28rdYFKuc,3678 
+pip/_vendor/requests/packages/chardet/euckrfreq.py,sha256=T5saK5mImySG5ygQPtsp6o2uKulouCwYm2ElOyFkJqU,45978 +pip/_vendor/requests/packages/chardet/euckrprober.py,sha256=Wo7dnZ5Erw_nB4H-m5alMiOxOuJUmGHlwCSaGqExDZA,1675 +pip/_vendor/requests/packages/chardet/euctwfreq.py,sha256=G_I0BW9i1w0ONeeUwIYqV7_U09buIHdqh-wNHVaql7I,34872 +pip/_vendor/requests/packages/chardet/euctwprober.py,sha256=upS2P6GuT5ujOxXYw-RJLcT7A4PTuo27KGUKU4UZpIQ,1676 +pip/_vendor/requests/packages/chardet/gb2312freq.py,sha256=M2gFdo_qQ_BslStEchrPW5CrPEZEacC0uyDLw4ok-kY,36011 +pip/_vendor/requests/packages/chardet/gb2312prober.py,sha256=VWnjoRa83Y6V6oczMaxyUr0uy48iCnC2nzk9zfEIRHc,1681 +pip/_vendor/requests/packages/chardet/hebrewprober.py,sha256=8pdoUfsVXf_L4BnJde_BewS6H2yInV5688eu0nFhLHY,13359 +pip/_vendor/requests/packages/chardet/jisfreq.py,sha256=ZcL4R5ekHHbP2KCYGakVMBsiKqZZZAABzhwi-uRkOps,47315 +pip/_vendor/requests/packages/chardet/jpcntx.py,sha256=yftmp0QaF6RJO5SJs8I7LU5AF4rwP23ebeCQL4BM1OY,19348 +pip/_vendor/requests/packages/chardet/langbulgarianmodel.py,sha256=ZyPsA796MSVhYdfWhMCgKWckupAKAnKqWcE3Cl3ej6o,12784 +pip/_vendor/requests/packages/chardet/langcyrillicmodel.py,sha256=fkcd5OvogUp-GrNDWAZPgkYsSRCD2omotAEvqjlmLKE,17725 +pip/_vendor/requests/packages/chardet/langgreekmodel.py,sha256=QHMy31CH_ot67UCtmurCEKqKx2WwoaKrw2YCYYBK2Lw,12628 +pip/_vendor/requests/packages/chardet/langhebrewmodel.py,sha256=4ASl5vzKJPng4H278VHKtRYC03TpQpenlHTcsmZH1rE,11318 +pip/_vendor/requests/packages/chardet/langhungarianmodel.py,sha256=SXwuUzh49_cBeMXhshRHdrhlkz0T8_pZWV_pdqBKNFk,12536 +pip/_vendor/requests/packages/chardet/langthaimodel.py,sha256=-k7djh3dGKngAGnt3WfuoJN7acDcWcmHAPojhaUd7q4,11275 +pip/_vendor/requests/packages/chardet/latin1prober.py,sha256=238JHOxH8aRudJY2NmeSv5s7i0Qe3GuklIU3HlYybvg,5232 +pip/_vendor/requests/packages/chardet/mbcharsetprober.py,sha256=9rOCjDVsmSMp6e7q2syqak22j7lrbUZhJhMee2gbVL0,3268 +pip/_vendor/requests/packages/chardet/mbcsgroupprober.py,sha256=SHRzNPLpDXfMJLA8phCHVU0WgqbgDCNxDQMolGX_7yk,1967 +pip/_vendor/requests/packages/chardet/mbcssm.py,sha256=IKwJXyxu34n6NojmxVxC60MLFtJKm-hIfxaFEnb3uBA,19590 +pip/_vendor/requests/packages/chardet/sbcharsetprober.py,sha256=Xq0lODqJnDgxglBiQI4BqTFiPbn63-0a5XNA5-hVu7U,4793 +pip/_vendor/requests/packages/chardet/sbcsgroupprober.py,sha256=8hLyH8RAG-aohBo7o_KciWVgRo42ZE_zEtuNG1JMRYI,3291 +pip/_vendor/requests/packages/chardet/sjisprober.py,sha256=UYOmiMDzttYIkSDoOB08UEagivJpUXz4tuWiWzTiOr8,3764 +pip/_vendor/requests/packages/chardet/universaldetector.py,sha256=h-E2x6XSCzlNjycYWG0Fe4Cf1SGdaIzUNu2HCphpMZA,6840 +pip/_vendor/requests/packages/chardet/utf8prober.py,sha256=7tdNZGrJY7jZUBD483GGMkiP0Tx8Fp-cGvWHoAsilHg,2652 +pip/_vendor/requests/packages/urllib3/__init__.py,sha256=EF9pbHgMzqQek2Y6EZ82A8B6wETFeW7bK0K-HoZ3Ffo,2852 +pip/_vendor/requests/packages/urllib3/_collections.py,sha256=RP-cHyTx4AgYwvoETK8q1IVRbWFJnE0VV692ZHSbU68,10553 +pip/_vendor/requests/packages/urllib3/connection.py,sha256=QCmkelYgtbc06DfJtgs22na78kRTLCTbLb-OSWLbt-A,11617 +pip/_vendor/requests/packages/urllib3/connectionpool.py,sha256=fls19n1Y4jnwOBsZz_9F01i08xH2gZXEIyyDmWd-mKU,33591 +pip/_vendor/requests/packages/urllib3/exceptions.py,sha256=zGjhZCR1wefEnCN5b7WouQ3UhXesJ2bRKYIeWusaFJs,5599 +pip/_vendor/requests/packages/urllib3/fields.py,sha256=WUMvCLvnw7XemBq6AmCgNPJwyIJL_vWaMHaA2FLlscM,5931 +pip/_vendor/requests/packages/urllib3/filepost.py,sha256=NvLlFsdt8ih_Q4S2ekQF3CJG0nOXs32YI-G04_AdT2g,2320 
+pip/_vendor/requests/packages/urllib3/poolmanager.py,sha256=9Uf0fUk0aR_s1auXgwceoN2gbaIQ08lrum_cGEA9-_U,13092 +pip/_vendor/requests/packages/urllib3/request.py,sha256=jET7OvA3FSjxABBRGhCyMdPvM9XuJA6df9gRhkJiJiY,5988 +pip/_vendor/requests/packages/urllib3/response.py,sha256=wxJSV_6pyh6Cgx7XFVGpNhpZCbh4eL7lCSFaU4ixXXc,18615 +pip/_vendor/requests/packages/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/requests/packages/urllib3/contrib/appengine.py,sha256=NdN_xOgDLMadUPe_dN3wdan_DH9-fxVNqFgq19tbqQs,7937 +pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py,sha256=r-vMDMXAGbix9a7-IhbKVTATmAst-5g4hKYOLf8Kd5M,4531 +pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py,sha256=JsdAh0gL4XvQzhOEBRoFtJN91qLf1LFIDEFZs95445I,11778 +pip/_vendor/requests/packages/urllib3/contrib/socks.py,sha256=uPHtE6R8uyUbD9R8l2wO80c87WDGZ9rou3kNOwV74eA,5668 +pip/_vendor/requests/packages/urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109 +pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py,sha256=VQaPONfhVMsb8B63Xg7ZOydJqIE_jzeMhVN3Pec6ogw,8935 +pip/_vendor/requests/packages/urllib3/packages/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py,sha256=cOWMIn1orgJoA35p6pSzO_-Dc6iOX9Dhl6D2sL9b_2o,460 +pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=fK28k37hL7-D79v9iM2fHgNK9Q1Pw0M7qVRL4rkfFjQ,3778 +pip/_vendor/requests/packages/urllib3/util/__init__.py,sha256=n2QE9_0Bb6u8tf7LUc4qKe8V-Hz9G8lEOc9j_30Q8d0,892 +pip/_vendor/requests/packages/urllib3/util/connection.py,sha256=7B5Mmepg5Xd399VKE__VHxD2ObapYFrB3mWJ_EnIebs,4744 +pip/_vendor/requests/packages/urllib3/util/request.py,sha256=ZMDewRK-mjlK72szGIIjzYnLIn-zPP0WgJUMjKeZ6Tg,2128 +pip/_vendor/requests/packages/urllib3/util/response.py,sha256=1UFd5TIp9MyBp4xgnZoyQZscZVPPr0tWRaXNR5w_vds,2165 +pip/_vendor/requests/packages/urllib3/util/retry.py,sha256=5eA3GHR_L14qz66NU6gr-v5VbKYsvdEqOvCcsx1oLKo,10664 +pip/_vendor/requests/packages/urllib3/util/ssl_.py,sha256=7xR_jvQLTQA1U006wJ1bl2KuLGnD1qQvUcFM2uysedw,11622 +pip/_vendor/requests/packages/urllib3/util/timeout.py,sha256=ioAIYptFyBG7eU_r8_ZmO45hpj1dJE6WCvrGR9dNFjs,9596 +pip/_vendor/requests/packages/urllib3/util/url.py,sha256=EcX4ZfmgKWcqM4sY9FlC-yN4y_snuURPV0TpUPHNjnc,5879 +pip/_vendor/webencodings/__init__.py,sha256=t7rAQQxXwalY-ak9hTl73qHjhia9UH-sL-e00qQrBpo,10576 +pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 +pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 +pip/_vendor/webencodings/tests.py,sha256=7vTk7LgOJn_t1XtT_viofZlEJ7cJCzPe_hvVHOkcQl8,6562 +pip/_vendor/webencodings/x_user_defined.py,sha256=72cfPRhbfkRCGkkA8ZnvVV7UnoiLb5uPMhXwhrXiLPk,4306 +pip/commands/__init__.py,sha256=2Uq3HCdjchJD9FL1LB7rd5v6UySVAVizX0W3EX3hIoE,2244 +pip/commands/check.py,sha256=-A7GI1-WZBh9a4P6UoH_aR-J7I8Lz8ly7m3wnCjmevs,1382 +pip/commands/completion.py,sha256=kkPgVX7SUcJ_8Juw5GkgWaxHN9_45wmAr9mGs1zXEEs,2453 +pip/commands/download.py,sha256=8RuuPmSYgAq3iEDTqZY_1PDXRqREdUULHNjWJeAv7Mo,7810 +pip/commands/freeze.py,sha256=h6-yFMpjCjbNj8-gOm5UuoF6cg14N5rPV4TCi3_CeuI,2835 +pip/commands/hash.py,sha256=MCt4jEFyfoce0lVeNEz1x49uaTY-VDkKiBvvxrVcHkw,1597 +pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982 +pip/commands/install.py,sha256=ovG9p9n1X2NPqMgFVtSuT9kMbLAdx1r3YSSiXSvgOKI,17412 
+pip/commands/list.py,sha256=93bCiFyt2Qut_YHkYHJMZHpXladmxsjS-yOtZeb3uqI,11369 +pip/commands/search.py,sha256=oTs9QNdefnrmCV_JeftG0PGiMuYVmiEDF1OUaYsmDao,4502 +pip/commands/show.py,sha256=ZYM57_7U8KP9MQIIyHKQdZxmiEZByy-DRzB697VFoTY,5891 +pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884 +pip/commands/wheel.py,sha256=z5SEhws2YRMb0Ml1IEkg6jFZMLRpLl86bHCrQbYt5zo,7729 +pip/compat/__init__.py,sha256=2Xs_IpsmdRgHbQgQO0c8_lPvHJnQXHyGWxPbLbYJL4c,4672 +pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096 +pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71 +pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487 +pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/operations/check.py,sha256=uwUN9cs1sPo7c0Sj6pRrSv7b22Pk29SXUImTelVchMQ,1590 +pip/operations/freeze.py,sha256=k-7w7LsM-RpPv7ERBzHiPpYkH-GuYfHLyR-Cp_1VPL0,5194 +pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276 +pip/req/req_file.py,sha256=fG9MDsXUNPhmGwxUiwrIXEynyD8Q7s3L47-hLZPDXq0,11926 +pip/req/req_install.py,sha256=gYrH-lwQMmt55VVbav_EtRIPu94cQbHFHm_Kq6AeHbg,46487 +pip/req/req_set.py,sha256=jHspXqcA2FxcF05dgUIAZ5huYPv6bn0wRUX0Z7PKmaA,34462 +pip/req/req_uninstall.py,sha256=fdH2VgCjEC8NRYDS7fRu3ZJaBBUEy-N5muwxDX5MBNM,6897 +pip/utils/__init__.py,sha256=HX_wYS15oiYOz-H3qG1Kbi1CY7AGWCNK5jloiD0fauc,27187 +pip/utils/appdirs.py,sha256=kj2LK-I2fC5QnEh_A_v-ev_IQMcXaWWF5DE39sNvCLQ,8811 +pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312 +pip/utils/deprecation.py,sha256=X_FMjtDbMJqfqEkdRrki-mYyIdPB6I6DHUTCA_ChY6M,2232 +pip/utils/encoding.py,sha256=NQxGiFS5GbeAveLZTnx92t5r0PYqvt0iRnP2u9SGG1w,971 +pip/utils/filesystem.py,sha256=ZEVBuYM3fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899 +pip/utils/glibc.py,sha256=jcQYjt_oJLPKVZB28Kauy4Sw70zS-wawxoU1HHX36_0,2939 +pip/utils/hashes.py,sha256=oMk7cd3PbJgzpSQyXq1MytMud5f6H5Oa2YY5hYuCq6I,2866 +pip/utils/logging.py,sha256=7yWu4gZw-Qclj7X80QVdpGWkdTWGKT4LiUVKcE04pro,3327 +pip/utils/outdated.py,sha256=fNwOCL5r2EftPGhgCYGMKu032HC8cV-JAr9lp0HmToM,5455 +pip/utils/packaging.py,sha256=qhmli14odw6DIhWJgQYS2Q0RrSbr8nXNcG48f5yTRms,2080 +pip/utils/setuptools_build.py,sha256=0blfscmNJW_iZ5DcswJeDB_PbtTEjfK9RL1R1WEDW2E,278 +pip/utils/ui.py,sha256=pbDkSAeumZ6jdZcOJ2yAbx8iBgeP2zfpqNnLJK1gskQ,11597 +pip/vcs/__init__.py,sha256=WafFliUTHMmsSISV8PHp1M5EXDNSWyJr78zKaQmPLdY,12374 +pip/vcs/bazaar.py,sha256=tYTwc4b4off8mr0O2o8SiGejqBDJxcbDBMSMd9-ISYc,3803 +pip/vcs/git.py,sha256=5LfWryi78A-2ULjEZJvCTarJ_3l8venwXASlwm8hiug,11197 +pip/vcs/mercurial.py,sha256=xG6rDiwHCRytJEs23SIHBXl_SwQo2jkkdD_6rVVP5h4,3472 +pip/vcs/subversion.py,sha256=GAuX2Sk7IZvJyEzENKcVld_wGBrQ3fpXDlXjapZEYdI,9350 +pip-9.0.1.dist-info/DESCRIPTION.rst,sha256=Va8Wj1XBpTbVQ2Z41mZRJdALEeziiS_ZewWn1H2ecY4,1287 +pip-9.0.1.dist-info/METADATA,sha256=mvs_tLoKAbECXY_6QHiVWQsagSL-1UjolQTpScT8JSk,2529 +pip-9.0.1.dist-info/RECORD,, +pip-9.0.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +pip-9.0.1.dist-info/entry_points.txt,sha256=GWc-Wb9WUKZ1EuVWNz-G0l3BeIpbNJLx0OJbZ61AAV0,68 +pip-9.0.1.dist-info/metadata.json,sha256=aqvkETDy4mHUBob-2Fn5WWlXORi_M2OSfQ2HQCUU_Fk,1565 +pip-9.0.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +../../bin/pip.exe,sha256=oanYjnY6N2bknAYSJm275cBHEFlxZYqqvsTK-KCPBDw,89466 +../../bin/pip3.exe,sha256=oanYjnY6N2bknAYSJm275cBHEFlxZYqqvsTK-KCPBDw,89466 +../../bin/pip3.6.exe,sha256=oanYjnY6N2bknAYSJm275cBHEFlxZYqqvsTK-KCPBDw,89466 
+pip-9.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/commands/__pycache__/check.cpython-36.pyc,, +pip/commands/__pycache__/completion.cpython-36.pyc,, +pip/commands/__pycache__/download.cpython-36.pyc,, +pip/commands/__pycache__/freeze.cpython-36.pyc,, +pip/commands/__pycache__/hash.cpython-36.pyc,, +pip/commands/__pycache__/help.cpython-36.pyc,, +pip/commands/__pycache__/install.cpython-36.pyc,, +pip/commands/__pycache__/list.cpython-36.pyc,, +pip/commands/__pycache__/search.cpython-36.pyc,, +pip/commands/__pycache__/show.cpython-36.pyc,, +pip/commands/__pycache__/uninstall.cpython-36.pyc,, +pip/commands/__pycache__/wheel.cpython-36.pyc,, +pip/commands/__pycache__/__init__.cpython-36.pyc,, +pip/compat/__pycache__/dictconfig.cpython-36.pyc,, +pip/compat/__pycache__/__init__.cpython-36.pyc,, +pip/models/__pycache__/index.cpython-36.pyc,, +pip/models/__pycache__/__init__.cpython-36.pyc,, +pip/operations/__pycache__/check.cpython-36.pyc,, +pip/operations/__pycache__/freeze.cpython-36.pyc,, +pip/operations/__pycache__/__init__.cpython-36.pyc,, +pip/req/__pycache__/req_file.cpython-36.pyc,, +pip/req/__pycache__/req_install.cpython-36.pyc,, +pip/req/__pycache__/req_set.cpython-36.pyc,, +pip/req/__pycache__/req_uninstall.cpython-36.pyc,, +pip/req/__pycache__/__init__.cpython-36.pyc,, +pip/utils/__pycache__/appdirs.cpython-36.pyc,, +pip/utils/__pycache__/build.cpython-36.pyc,, +pip/utils/__pycache__/deprecation.cpython-36.pyc,, +pip/utils/__pycache__/encoding.cpython-36.pyc,, +pip/utils/__pycache__/filesystem.cpython-36.pyc,, +pip/utils/__pycache__/glibc.cpython-36.pyc,, +pip/utils/__pycache__/hashes.cpython-36.pyc,, +pip/utils/__pycache__/logging.cpython-36.pyc,, +pip/utils/__pycache__/outdated.cpython-36.pyc,, +pip/utils/__pycache__/packaging.cpython-36.pyc,, +pip/utils/__pycache__/setuptools_build.cpython-36.pyc,, +pip/utils/__pycache__/ui.cpython-36.pyc,, +pip/utils/__pycache__/__init__.cpython-36.pyc,, +pip/vcs/__pycache__/bazaar.cpython-36.pyc,, +pip/vcs/__pycache__/git.cpython-36.pyc,, +pip/vcs/__pycache__/mercurial.cpython-36.pyc,, +pip/vcs/__pycache__/subversion.cpython-36.pyc,, +pip/vcs/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/ansi.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/ansitowin32.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/initialise.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/misc.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-36.pyc,, 
+pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/wheel.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/lint.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/py.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/constants.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/html5parser.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/serializer.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_inputstream.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_utils.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/linklockfile.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-36.pyc,, 
+pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/bar.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/counter.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/helpers.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/spinner.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/big5freq.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/big5prober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/chardetect.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/chardistribution.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/charsetprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/codingstatemachine.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/constants.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/cp949prober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/escprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/escsm.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/eucjpprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/euckrfreq.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/euckrprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/euctwfreq.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/euctwprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/gb2312freq.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/gb2312prober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/hebrewprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/jisfreq.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/jpcntx.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langthaimodel.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/latin1prober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, 
+pip/_vendor/requests/packages/chardet/__pycache__/mbcssm.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/sjisprober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/universaldetector.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/utf8prober.cpython-36.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/appengine.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/ntlmpool.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/pyopenssl.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/socks.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/packages/__pycache__/ordered_dict.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/packages/__pycache__/six.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/packages/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/connection.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/request.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/response.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/retry.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/ssl_.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/timeout.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/url.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/connection.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/connectionpool.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/exceptions.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/fields.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/filepost.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/poolmanager.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/request.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/response.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/_collections.cpython-36.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/packages/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-36.pyc,, 
+pip/_vendor/requests/__pycache__/status_codes.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/labels.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/mklabels.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/tests.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/__pycache__/appdirs.cpython-36.pyc,, +pip/_vendor/__pycache__/distro.cpython-36.pyc,, +pip/_vendor/__pycache__/ipaddress.cpython-36.pyc,, +pip/_vendor/__pycache__/ordereddict.cpython-36.pyc,, +pip/_vendor/__pycache__/pyparsing.cpython-36.pyc,, +pip/_vendor/__pycache__/re-vendor.cpython-36.pyc,, +pip/_vendor/__pycache__/retrying.cpython-36.pyc,, +pip/_vendor/__pycache__/six.cpython-36.pyc,, +pip/_vendor/__pycache__/__init__.cpython-36.pyc,, +pip/__pycache__/basecommand.cpython-36.pyc,, +pip/__pycache__/baseparser.cpython-36.pyc,, +pip/__pycache__/cmdoptions.cpython-36.pyc,, +pip/__pycache__/download.cpython-36.pyc,, +pip/__pycache__/exceptions.cpython-36.pyc,, +pip/__pycache__/index.cpython-36.pyc,, +pip/__pycache__/locations.cpython-36.pyc,, +pip/__pycache__/pep425tags.cpython-36.pyc,, +pip/__pycache__/status_codes.cpython-36.pyc,, +pip/__pycache__/wheel.cpython-36.pyc,, +pip/__pycache__/__init__.cpython-36.pyc,, +pip/__pycache__/__main__.cpython-36.pyc,, diff --git a/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/WHEEL b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/entry_points.txt b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/entry_points.txt new file mode 100644 index 0000000..c02a8d5 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip:main +pip3 = pip:main +pip3.5 = pip:main + diff --git a/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/top_level.txt b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/top_level.txt new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip-9.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +pip diff --git a/RBXLegacyDiscordBot/lib/pip/__init__.py b/RBXLegacyDiscordBot/lib/pip/__init__.py new file mode 100644 index 0000000..9c1d8f9 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/__init__.py @@ -0,0 +1,331 @@ +#!/usr/bin/env python +from __future__ import absolute_import + +import locale +import logging +import os +import optparse +import warnings + +import sys +import re + +# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks, +# but if invoked (i.e. imported), it will issue a warning to stderr if socks +# isn't available. requests unconditionally imports urllib3's socks contrib +# module, triggering this warning. The warning breaks DEP-8 tests (because of +# the stderr output) and is just plain annoying in normal usage. I don't want +# to add socks as yet another dependency for pip, nor do I want to allow-stder +# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to +# be done before the import of pip.vcs. 
+from pip._vendor.requests.packages.urllib3.exceptions import DependencyWarning +warnings.filterwarnings("ignore", category=DependencyWarning) # noqa + + +from pip.exceptions import InstallationError, CommandError, PipError +from pip.utils import get_installed_distributions, get_prog +from pip.utils import deprecation, dist_is_editable +from pip.vcs import git, mercurial, subversion, bazaar # noqa +from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip.commands import get_summaries, get_similar_commands +from pip.commands import commands_dict +from pip._vendor.requests.packages.urllib3.exceptions import ( + InsecureRequestWarning, +) + + +# assignment for flake8 to be happy + +# This fixes a peculiarity when importing via __import__ - as we are +# initialising the pip module, "from pip import cmdoptions" is recursive +# and appears not to work properly in that situation. +import pip.cmdoptions +cmdoptions = pip.cmdoptions + +# The version as used in the setup.py and the docs conf.py +__version__ = "9.0.1" + + +logger = logging.getLogger(__name__) + +# Hide the InsecureRequestWarning from urllib3 +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +def autocomplete(): + """Command and option completion for the main option parser (and options) + and its subcommands (and options). + + Enable by sourcing one of the completion shell scripts (bash, zsh or fish). + """ + # Don't complete if user hasn't sourced bash_completion file. + if 'PIP_AUTO_COMPLETE' not in os.environ: + return + cwords = os.environ['COMP_WORDS'].split()[1:] + cword = int(os.environ['COMP_CWORD']) + try: + current = cwords[cword - 1] + except IndexError: + current = '' + + subcommands = [cmd for cmd, summary in get_summaries()] + options = [] + # subcommand + try: + subcommand_name = [w for w in cwords if w in subcommands][0] + except IndexError: + subcommand_name = None + + parser = create_main_parser() + # subcommand options + if subcommand_name: + # special case: 'help' subcommand has no options + if subcommand_name == 'help': + sys.exit(1) + # special case: list locally installed dists for uninstall command + if subcommand_name == 'uninstall' and not current.startswith('-'): + installed = [] + lc = current.lower() + for dist in get_installed_distributions(local_only=True): + if dist.key.startswith(lc) and dist.key not in cwords[1:]: + installed.append(dist.key) + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + subcommand = commands_dict[subcommand_name]() + options += [(opt.get_opt_string(), opt.nargs) + for opt in subcommand.parser.option_list_all + if opt.help != optparse.SUPPRESS_HELP] + + # filter out previously specified options from available options + prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1]: + opt_label += '=' + print(opt_label) + else: + # show main parser options only when necessary + if current.startswith('-') or current.startswith('--'): + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + opts = (o for it in opts for o in it) + + subcommands += [i.get_opt_string() for i in opts + if i.help != optparse.SUPPRESS_HELP] + + print(' '.join([x for 
x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def create_main_parser(): + parser_kw = { + 'usage': '\n%prog [options]', + 'add_help_option': False, + 'formatter': UpdatingDefaultsHelpFormatter(), + 'name': 'global', + 'prog': get_prog(), + } + + parser = ConfigOptionParser(**parser_kw) + parser.disable_interspersed_args() + + pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + parser.version = 'pip %s from %s (python %s)' % ( + __version__, pip_pkg_dir, sys.version[:3]) + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + parser.main = True # so the help formatter knows + + # create command listing for description + command_summaries = get_summaries() + description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] + parser.description = '\n'.join(description) + + return parser + + +def parseopts(args): + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout==5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == 'help' and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args + + +def check_isolated(args): + isolated = False + + if "--isolated" in args: + isolated = True + + return isolated + + +def main(args=None): + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parseopts(args) + except PipError as exc: + sys.stderr.write("ERROR: %s" % exc) + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, '') + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = commands_dict[cmd_name](isolated=check_isolated(cmd_args)) + return command.main(cmd_args) + + +# ########################################################### +# # Writing freeze files + +class FrozenRequirement(object): + + def __init__(self, name, req, editable, comments=()): + self.name = name + self.req = req + self.editable = editable + self.comments = comments + + _rev_re = re.compile(r'-r(\d+)$') + _date_re = re.compile(r'-(20\d\d\d\d\d\d)$') + + @classmethod + def from_dist(cls, dist, dependency_links): + location = os.path.normcase(os.path.abspath(dist.location)) + comments = [] + from pip.vcs import vcs, get_src_requirement + if dist_is_editable(dist) and vcs.get_backend_name(location): + editable = 
True + try: + req = get_src_requirement(dist, location) + except InstallationError as exc: + logger.warning( + "Error when trying to get requirement for VCS system %s, " + "falling back to uneditable format", exc + ) + req = None + if req is None: + logger.warning( + 'Could not determine repository location of %s', location + ) + comments.append( + '## !! Could not determine repository location' + ) + req = dist.as_requirement() + editable = False + else: + editable = False + req = dist.as_requirement() + specs = req.specs + assert len(specs) == 1 and specs[0][0] in ["==", "==="], \ + 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \ + (specs, dist) + version = specs[0][1] + ver_match = cls._rev_re.search(version) + date_match = cls._date_re.search(version) + if ver_match or date_match: + svn_backend = vcs.get_backend('svn') + if svn_backend: + svn_location = svn_backend().get_location( + dist, + dependency_links, + ) + if not svn_location: + logger.warning( + 'Warning: cannot find svn location for %s', req) + comments.append( + '## FIXME: could not find svn URL in dependency_links ' + 'for this package:' + ) + else: + comments.append( + '# Installing as editable to satisfy requirement %s:' % + req + ) + if ver_match: + rev = ver_match.group(1) + else: + rev = '{%s}' % date_match.group(1) + editable = True + req = '%s@%s#egg=%s' % ( + svn_location, + rev, + cls.egg_name(dist) + ) + return cls(dist.project_name, req, editable, comments) + + @staticmethod + def egg_name(dist): + name = dist.egg_name() + match = re.search(r'-py\d\.\d$', name) + if match: + name = name[:match.start()] + return name + + def __str__(self): + req = self.req + if self.editable: + req = '-e %s' % req + return '\n'.join(list(self.comments) + [str(req)]) + '\n' + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/RBXLegacyDiscordBot/lib/pip/__main__.py b/RBXLegacyDiscordBot/lib/pip/__main__.py new file mode 100644 index 0000000..5556539 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/__main__.py @@ -0,0 +1,19 @@ +from __future__ import absolute_import + +import os +import sys + +# If we are running from a wheel, add the wheel to sys.path +# This allows the usage python pip-*.whl/pip install pip-*.whl +if __package__ == '': + # __file__ is pip-*.whl/pip/__main__.py + # first dirname call strips of '/__main__.py', second strips off '/pip' + # Resulting path is the name of the wheel itself + # Add that to sys.path so we can import pip + path = os.path.dirname(os.path.dirname(__file__)) + sys.path.insert(0, path) + +import pip # noqa + +if __name__ == '__main__': + sys.exit(pip.main()) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/__init__.py new file mode 100644 index 0000000..bee5f5e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/__init__.py @@ -0,0 +1,107 @@ +""" +pip._vendor is for vendoring dependencies of pip to prevent needing pip to +depend on something external. + +Files inside of pip._vendor should be considered immutable and should only be +updated to versions from upstream. +""" +from __future__ import absolute_import + +import glob +import os.path +import sys + +# Downstream redistributors which have debundled our dependencies should also +# patch this value to be true. This will trigger the additional patching +# to cause things like "six" to be available as pip. 
+DEBUNDLED = False + +# By default, look in this directory for a bunch of .whl files which we will +# add to the beginning of sys.path before attempting to import anything. This +# is done to support downstream re-distributors like Debian and Fedora who +# wish to create their own Wheels for our dependencies to aid in debundling. +WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) + + +# Define a small helper function to alias our vendored modules to the real ones +# if the vendored ones do not exist. This idea of this was taken from +# https://github.com/kennethreitz/requests/pull/2567. +def vendored(modulename): + vendored_name = "{0}.{1}".format(__name__, modulename) + + try: + __import__(vendored_name, globals(), locals(), level=0) + except ImportError: + try: + __import__(modulename, globals(), locals(), level=0) + except ImportError: + # We can just silently allow import failures to pass here. If we + # got to this point it means that ``import pip._vendor.whatever`` + # failed and so did ``import whatever``. Since we're importing this + # upfront in an attempt to alias imports, not erroring here will + # just mean we get a regular import error whenever pip *actually* + # tries to import one of these modules to use it, which actually + # gives us a better error message than we would have otherwise + # gotten. + pass + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) + + +# If we're operating in a debundled setup, then we want to go ahead and trigger +# the aliasing of our vendored libraries as well as looking for wheels to add +# to our sys.path. This will cause all of this code to be a no-op typically +# however downstream redistributors can enable it in a consistent way across +# all platforms. +if DEBUNDLED: + # Actually look inside of WHEEL_DIR to find .whl files and add them to the + # front of our sys.path. + sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path + + # Actually alias all of our vendored dependencies. + vendored("cachecontrol") + vendored("colorama") + vendored("distlib") + vendored("distro") + vendored("html5lib") + vendored("lockfile") + vendored("six") + vendored("six.moves") + vendored("six.moves.urllib") + vendored("packaging") + vendored("packaging.version") + vendored("packaging.specifiers") + vendored("pkg_resources") + vendored("progress") + vendored("retrying") + vendored("requests") + vendored("requests.packages") + vendored("requests.packages.urllib3") + vendored("requests.packages.urllib3._collections") + vendored("requests.packages.urllib3.connection") + vendored("requests.packages.urllib3.connectionpool") + vendored("requests.packages.urllib3.contrib") + vendored("requests.packages.urllib3.contrib.ntlmpool") + vendored("requests.packages.urllib3.contrib.pyopenssl") + vendored("requests.packages.urllib3.exceptions") + vendored("requests.packages.urllib3.fields") + vendored("requests.packages.urllib3.filepost") + vendored("requests.packages.urllib3.packages") + vendored("requests.packages.urllib3.packages.ordered_dict") + vendored("requests.packages.urllib3.packages.six") + vendored("requests.packages.urllib3.packages.ssl_match_hostname") + vendored("requests.packages.urllib3.packages.ssl_match_hostname." 
+ "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/appdirs.py b/RBXLegacyDiscordBot/lib/pip/_vendor/appdirs.py new file mode 100644 index 0000000..4b5c38b --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/appdirs.py @@ -0,0 +1,552 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See for details and usage. +""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - macOS: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version_info__ = (1, 4, 0) +__version__ = '.'.join(map(str, __version_info__)) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "macOS", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. + system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + macOS: ~/Library/Application Support/ + Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\\Application Data\\ + Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ + Win 7 (not roaming): C:\Users\\AppData\Local\\ + Win 7 (roaming): C:\Users\\AppData\Roaming\\ + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/". 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/', + if XDG_DATA_DIRS is not set + + Typical user data directories are: + macOS: /Library/Application Support/ + Unix: /usr/local/share/ or /usr/share/ + Win XP: C:\Documents and Settings\All Users\Application Data\\ + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. 
You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + macOS: same as user_data_dir + Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by deafult "~/.config/". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set + + Typical user data directories are: + macOS: same as site_data_dir + Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. 
You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + macOS: ~/Library/Caches/ + Unix: ~/.cache/ (XDG default) + Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache + Vista: C:\Users\\AppData\Local\\\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. + + Typical user cache directories are: + macOS: ~/Library/Logs/ + Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs + Vista: C:\Users\\AppData\Local\\\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. 
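+
+    A rough usage sketch (illustrative values only):
+
+        user_log_dir("MyApp", "MyCompany")
+        # Unix  -> '/home/<user>/.cache/MyApp/log'       (illustrative)
+        # macOS -> '/Users/<user>/Library/Logs/MyApp'
+        # Win 7 -> 'C:\Users\<user>\AppData\Local\MyCompany\MyApp\Logs'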
+ """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname, appauthor=None, version=None, roaming=False, + multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # . 
+ has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernal.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + import win32com.shell + _get_win_folder = _get_win_folder_with_pywin32 + except ImportError: + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +#---- self test code + +if __name__ == "__main__": + appname = "MyApp" + appauthor = "MyCompany" + + props = ("user_data_dir", "site_data_dir", + "user_config_dir", "site_config_dir", + "user_cache_dir", "user_log_dir") + + print("-- app dirs (with optional 'version')") + dirs = AppDirs(appname, appauthor, version="1.0") + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'version')") + dirs = AppDirs(appname, appauthor) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'appauthor')") + dirs = AppDirs(appname) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = AppDirs(appname, appauthor=False) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 0000000..ec9da2e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/__init__.py @@ -0,0 +1,11 @@ +"""CacheControl import Interface. + +Make it easy to import from cachecontrol without long namespaces. 
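+
+A rough usage sketch (illustrative; wrapping a requests ``Session`` is the
+intended entry point, and ``CacheControl`` is imported from ``.wrapper``
+below):
+
+    from pip._vendor import requests
+    from pip._vendor.cachecontrol import CacheControl
+
+    sess = CacheControl(requests.Session())   # mounts the caching adapter
+    resp = sess.get('https://example.com/')   # illustrative URL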
+""" +__author__ = 'Eric Larson' +__email__ = 'eric@ionrock.org' +__version__ = '0.11.7' + +from .wrapper import CacheControl +from .adapter import CacheControlAdapter +from .controller import CacheController diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/_cmd.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 0000000..afdcc88 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/_cmd.py @@ -0,0 +1,60 @@ +import logging + +from pip._vendor import requests + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import logger + +from argparse import ArgumentParser + + +def setup_logging(): + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + logger.addHandler(handler) + + +def get_session(): + adapter = CacheControlAdapter( + DictCache(), + cache_etags=True, + serializer=None, + heuristic=None, + ) + sess = requests.Session() + sess.mount('http://', adapter) + sess.mount('https://', adapter) + + sess.cache_controller = adapter.controller + return sess + + +def get_args(): + parser = ArgumentParser() + parser.add_argument('url', help='The URL to try and cache') + return parser.parse_args() + + +def main(args=None): + args = get_args() + sess = get_session() + + # Make a request to get a response + resp = sess.get(args.url) + + # Turn on logging + setup_logging() + + # try setting the cache + sess.cache_controller.cache_response(resp.request, resp.raw) + + # Now try to get it + if sess.cache_controller.cached_request(resp.request): + print('Cached!') + else: + print('Not cached :(') + + +if __name__ == '__main__': + main() diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/adapter.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 0000000..2348856 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/adapter.py @@ -0,0 +1,125 @@ +import types +import functools + +from pip._vendor.requests.adapters import HTTPAdapter + +from .controller import CacheController +from .cache import DictCache +from .filewrapper import CallbackFileWrapper + + +class CacheControlAdapter(HTTPAdapter): + invalidating_methods = set(['PUT', 'DELETE']) + + def __init__(self, cache=None, + cache_etags=True, + controller_class=None, + serializer=None, + heuristic=None, + *args, **kw): + super(CacheControlAdapter, self).__init__(*args, **kw) + self.cache = cache or DictCache() + self.heuristic = heuristic + + controller_factory = controller_class or CacheController + self.controller = controller_factory( + self.cache, + cache_etags=cache_etags, + serializer=serializer, + ) + + def send(self, request, **kw): + """ + Send a request. Use the request information to see if it + exists in the cache and cache the response if we need to and can. + """ + if request.method == 'GET': + cached_response = self.controller.cached_request(request) + if cached_response: + return self.build_response(request, cached_response, + from_cache=True) + + # check for etags and add headers if appropriate + request.headers.update( + self.controller.conditional_headers(request) + ) + + resp = super(CacheControlAdapter, self).send(request, **kw) + + return resp + + def build_response(self, request, response, from_cache=False): + """ + Build a response by making a request or using the cache. 
+ + This will end up calling send and returning a potentially + cached response + """ + if not from_cache and request.method == 'GET': + # Check for any heuristics that might update headers + # before trying to cache. + if self.heuristic: + response = self.heuristic.apply(response) + + # apply any expiration heuristics + if response.status == 304: + # We must have sent an ETag request. This could mean + # that we've been expired already or that we simply + # have an etag. In either case, we want to try and + # update the cache if that is the case. + cached_response = self.controller.update_cached_response( + request, response + ) + + if cached_response is not response: + from_cache = True + + # We are done with the server response, read a + # possible response body (compliant servers will + # not return one, but we cannot be 100% sure) and + # release the connection back to the pool. + response.read(decode_content=False) + response.release_conn() + + response = cached_response + + # We always cache the 301 responses + elif response.status == 301: + self.controller.cache_response(request, response) + else: + # Wrap the response file with a wrapper that will cache the + # response when the stream has been consumed. + response._fp = CallbackFileWrapper( + response._fp, + functools.partial( + self.controller.cache_response, + request, + response, + ) + ) + if response.chunked: + super_update_chunk_length = response._update_chunk_length + + def _update_chunk_length(self): + super_update_chunk_length() + if self.chunk_left == 0: + self._fp._close() + response._update_chunk_length = types.MethodType(_update_chunk_length, response) + + resp = super(CacheControlAdapter, self).build_response( + request, response + ) + + # See if we should invalidate the cache. + if request.method in self.invalidating_methods and resp.ok: + cache_url = self.controller.cache_url(request.url) + self.cache.delete(cache_url) + + # Give the request a from_cache attr to let people use it + resp.from_cache = from_cache + + return resp + + def close(self): + self.cache.close() + super(CacheControlAdapter, self).close() diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/cache.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/cache.py new file mode 100644 index 0000000..7389a73 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/cache.py @@ -0,0 +1,39 @@ +""" +The cache object API for implementing caches. The default is a thread +safe in-memory dictionary. 
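+
+A minimal sketch of a custom cache (illustrative only): the required API is
+just get/set/delete, with close() optional, exactly as BaseCache defines
+below.
+
+    class NullCache(BaseCache):
+        # Illustrative no-op cache: never stores anything.
+        def get(self, key):
+            return None
+
+        def set(self, key, value):
+            pass
+
+        def delete(self, key):
+            pass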
+""" +from threading import Lock + + +class BaseCache(object): + + def get(self, key): + raise NotImplemented() + + def set(self, key, value): + raise NotImplemented() + + def delete(self, key): + raise NotImplemented() + + def close(self): + pass + + +class DictCache(BaseCache): + + def __init__(self, init_dict=None): + self.lock = Lock() + self.data = init_dict or {} + + def get(self, key): + return self.data.get(key, None) + + def set(self, key, value): + with self.lock: + self.data.update({key: value}) + + def delete(self, key): + with self.lock: + if key in self.data: + self.data.pop(key) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/__init__.py new file mode 100644 index 0000000..f9e66a1 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/__init__.py @@ -0,0 +1,18 @@ +from textwrap import dedent + +try: + from .file_cache import FileCache +except ImportError: + notice = dedent(''' + NOTE: In order to use the FileCache you must have + lockfile installed. You can install it via pip: + pip install lockfile + ''') + print(notice) + + +try: + import redis + from .redis_cache import RedisCache +except ImportError: + pass diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/file_cache.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 0000000..b77728f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/file_cache.py @@ -0,0 +1,116 @@ +import hashlib +import os + +from pip._vendor.lockfile import LockFile +from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile + +from ..cache import BaseCache +from ..controller import CacheController + + +def _secure_open_write(filename, fmode): + # We only want to write to this file, so open it in write only mode + flags = os.O_WRONLY + + # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only + # will open *new* files. + # We specify this because we want to ensure that the mode we pass is the + # mode of the file. + flags |= os.O_CREAT | os.O_EXCL + + # Do not follow symlinks to prevent someone from making a symlink that + # we follow and insecurely open a cache file. + if hasattr(os, "O_NOFOLLOW"): + flags |= os.O_NOFOLLOW + + # On Windows we'll mark this file as binary + if hasattr(os, "O_BINARY"): + flags |= os.O_BINARY + + # Before we open our file, we want to delete any existing file that is + # there + try: + os.remove(filename) + except (IOError, OSError): + # The file must not exist already, so we can just skip ahead to opening + pass + + # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a + # race condition happens between the os.remove and this line, that an + # error will be raised. Because we utilize a lockfile this should only + # happen if someone is attempting to attack us. 
+ fd = os.open(filename, flags, fmode) + try: + return os.fdopen(fd, "wb") + except: + # An error occurred wrapping our FD in a file object + os.close(fd) + raise + + +class FileCache(BaseCache): + def __init__(self, directory, forever=False, filemode=0o0600, + dirmode=0o0700, use_dir_lock=None, lock_class=None): + + if use_dir_lock is not None and lock_class is not None: + raise ValueError("Cannot use use_dir_lock and lock_class together") + + if use_dir_lock: + lock_class = MkdirLockFile + + if lock_class is None: + lock_class = LockFile + + self.directory = directory + self.forever = forever + self.filemode = filemode + self.dirmode = dirmode + self.lock_class = lock_class + + + @staticmethod + def encode(x): + return hashlib.sha224(x.encode()).hexdigest() + + def _fn(self, name): + # NOTE: This method should not change as some may depend on it. + # See: https://github.com/ionrock/cachecontrol/issues/63 + hashed = self.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + name = self._fn(key) + if not os.path.exists(name): + return None + + with open(name, 'rb') as fh: + return fh.read() + + def set(self, key, value): + name = self._fn(key) + + # Make sure the directory exists + try: + os.makedirs(os.path.dirname(name), self.dirmode) + except (IOError, OSError): + pass + + with self.lock_class(name) as lock: + # Write our actual file + with _secure_open_write(lock.path, self.filemode) as fh: + fh.write(value) + + def delete(self, key): + name = self._fn(key) + if not self.forever: + os.remove(name) + + +def url_to_file_path(url, filecache): + """Return the file cache path based on the URL. + + This does not ensure the file exists! + """ + key = CacheController.cache_url(url) + return filecache._fn(key) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/redis_cache.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 0000000..9f5d55f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -0,0 +1,41 @@ +from __future__ import division + +from datetime import datetime + + +def total_seconds(td): + """Python 2.6 compatability""" + if hasattr(td, 'total_seconds'): + return td.total_seconds() + + ms = td.microseconds + secs = (td.seconds + td.days * 24 * 3600) + return (ms + secs * 10**6) / 10**6 + + +class RedisCache(object): + + def __init__(self, conn): + self.conn = conn + + def get(self, key): + return self.conn.get(key) + + def set(self, key, value, expires=None): + if not expires: + self.conn.set(key, value) + else: + expires = expires - datetime.now() + self.conn.setex(key, total_seconds(expires), value) + + def delete(self, key): + self.conn.delete(key) + + def clear(self): + """Helper for clearing all the keys in a database. 
Use with + caution!""" + for key in self.conn.keys(): + self.conn.delete(key) + + def close(self): + self.conn.disconnect() diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/compat.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/compat.py new file mode 100644 index 0000000..018e6ac --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/compat.py @@ -0,0 +1,20 @@ +try: + from urllib.parse import urljoin +except ImportError: + from urlparse import urljoin + + +try: + import cPickle as pickle +except ImportError: + import pickle + + +from pip._vendor.requests.packages.urllib3.response import HTTPResponse +from pip._vendor.requests.packages.urllib3.util import is_fp_closed + +# Replicate some six behaviour +try: + text_type = (unicode,) +except NameError: + text_type = (str,) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/controller.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 0000000..5eb961f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/controller.py @@ -0,0 +1,353 @@ +""" +The httplib2 algorithms ported for use with requests. +""" +import logging +import re +import calendar +import time +from email.utils import parsedate_tz + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .cache import DictCache +from .serialize import Serializer + + +logger = logging.getLogger(__name__) + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + + +class CacheController(object): + """An interface to see if request should cached or not. + """ + def __init__(self, cache=None, cache_etags=True, serializer=None): + self.cache = cache or DictCache() + self.cache_etags = cache_etags + self.serializer = serializer or Serializer() + + @classmethod + def _urlnorm(cls, uri): + """Normalize the URL to create a safe key for the cache""" + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise Exception("Only absolute URIs are allowed. uri = %s" % uri) + + scheme = scheme.lower() + authority = authority.lower() + + if not path: + path = "/" + + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + defrag_uri = scheme + "://" + authority + request_uri + + return defrag_uri + + @classmethod + def cache_url(cls, uri): + return cls._urlnorm(uri) + + def parse_cache_control(self, headers): + """ + Parse the cache control headers returning a dictionary with values + for the different directives. + """ + retval = {} + + cc_header = 'cache-control' + if 'Cache-Control' in headers: + cc_header = 'Cache-Control' + + if cc_header in headers: + parts = headers[cc_header].split(',') + parts_with_args = [ + tuple([x.strip().lower() for x in part.split("=", 1)]) + for part in parts if -1 != part.find("=") + ] + parts_wo_args = [ + (name.strip().lower(), 1) + for name in parts if -1 == name.find("=") + ] + retval = dict(parts_with_args + parts_wo_args) + return retval + + def cached_request(self, request): + """ + Return a cached response if it exists in the cache, otherwise + return False. 
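+
+        A rough sketch of the expected flow (illustrative; in practice the
+        adapter in adapter.py drives this):
+
+            resp = controller.cached_request(request)
+            if resp:
+                pass  # serve the cached urllib3 response
+            else:
+                pass  # go to the network, then call cache_response()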
+ """ + cache_url = self.cache_url(request.url) + logger.debug('Looking up "%s" in the cache', cache_url) + cc = self.parse_cache_control(request.headers) + + # Bail out if the request insists on fresh data + if 'no-cache' in cc: + logger.debug('Request header has "no-cache", cache bypassed') + return False + + if 'max-age' in cc and cc['max-age'] == 0: + logger.debug('Request header has "max_age" as 0, cache bypassed') + return False + + # Request allows serving from the cache, let's see if we find something + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug('No cache entry available') + return False + + # Check whether it can be deserialized + resp = self.serializer.loads(request, cache_data) + if not resp: + logger.warning('Cache entry deserialization failed, entry ignored') + return False + + # If we have a cached 301, return it immediately. We don't + # need to test our response for other headers b/c it is + # intrinsically "cacheable" as it is Permanent. + # See: + # https://tools.ietf.org/html/rfc7231#section-6.4.2 + # + # Client can try to refresh the value by repeating the request + # with cache busting headers as usual (ie no-cache). + if resp.status == 301: + msg = ('Returning cached "301 Moved Permanently" response ' + '(ignoring date and etag information)') + logger.debug(msg) + return resp + + headers = CaseInsensitiveDict(resp.headers) + if not headers or 'date' not in headers: + if 'etag' not in headers: + # Without date or etag, the cached response can never be used + # and should be deleted. + logger.debug('Purging cached response: no date or etag') + self.cache.delete(cache_url) + logger.debug('Ignoring cached response: no date') + return False + + now = time.time() + date = calendar.timegm( + parsedate_tz(headers['date']) + ) + current_age = max(0, now - date) + logger.debug('Current age based on date: %i', current_age) + + # TODO: There is an assumption that the result will be a + # urllib3 response object. This may not be best since we + # could probably avoid instantiating or constructing the + # response until we know we need it. + resp_cc = self.parse_cache_control(headers) + + # determine freshness + freshness_lifetime = 0 + + # Check the max-age pragma in the cache control header + if 'max-age' in resp_cc and resp_cc['max-age'].isdigit(): + freshness_lifetime = int(resp_cc['max-age']) + logger.debug('Freshness lifetime from max-age: %i', + freshness_lifetime) + + # If there isn't a max-age, check for an expires header + elif 'expires' in headers: + expires = parsedate_tz(headers['expires']) + if expires is not None: + expire_time = calendar.timegm(expires) - date + freshness_lifetime = max(0, expire_time) + logger.debug("Freshness lifetime from expires: %i", + freshness_lifetime) + + # Determine if we are setting freshness limit in the + # request. Note, this overrides what was in the response. 
+ if 'max-age' in cc: + try: + freshness_lifetime = int(cc['max-age']) + logger.debug('Freshness lifetime from request max-age: %i', + freshness_lifetime) + except ValueError: + freshness_lifetime = 0 + + if 'min-fresh' in cc: + try: + min_fresh = int(cc['min-fresh']) + except ValueError: + min_fresh = 0 + # adjust our current age by our min fresh + current_age += min_fresh + logger.debug('Adjusted current age from min-fresh: %i', + current_age) + + # Return entry if it is fresh enough + if freshness_lifetime > current_age: + logger.debug('The response is "fresh", returning cached response') + logger.debug('%i > %i', freshness_lifetime, current_age) + return resp + + # we're not fresh. If we don't have an Etag, clear it out + if 'etag' not in headers: + logger.debug( + 'The cached response is "stale" with no etag, purging' + ) + self.cache.delete(cache_url) + + # return the original handler + return False + + def conditional_headers(self, request): + cache_url = self.cache_url(request.url) + resp = self.serializer.loads(request, self.cache.get(cache_url)) + new_headers = {} + + if resp: + headers = CaseInsensitiveDict(resp.headers) + + if 'etag' in headers: + new_headers['If-None-Match'] = headers['ETag'] + + if 'last-modified' in headers: + new_headers['If-Modified-Since'] = headers['Last-Modified'] + + return new_headers + + def cache_response(self, request, response, body=None): + """ + Algorithm for caching requests. + + This assumes a requests Response object. + """ + # From httplib2: Don't cache 206's since we aren't going to + # handle byte range requests + cacheable_status_codes = [200, 203, 300, 301] + if response.status not in cacheable_status_codes: + logger.debug( + 'Status code %s not in %s', + response.status, + cacheable_status_codes + ) + return + + response_headers = CaseInsensitiveDict(response.headers) + + # If we've been given a body, our response has a Content-Length, that + # Content-Length is valid then we can check to see if the body we've + # been given matches the expected size, and if it doesn't we'll just + # skip trying to cache it. + if (body is not None and + "content-length" in response_headers and + response_headers["content-length"].isdigit() and + int(response_headers["content-length"]) != len(body)): + return + + cc_req = self.parse_cache_control(request.headers) + cc = self.parse_cache_control(response_headers) + + cache_url = self.cache_url(request.url) + logger.debug('Updating cache with response from "%s"', cache_url) + + # Delete it from the cache if we happen to have it stored there + no_store = False + if cc.get('no-store'): + no_store = True + logger.debug('Response header has "no-store"') + if cc_req.get('no-store'): + no_store = True + logger.debug('Request header has "no-store"') + if no_store and self.cache.get(cache_url): + logger.debug('Purging existing cache entry to honor "no-store"') + self.cache.delete(cache_url) + + # If we've been given an etag, then keep the response + if self.cache_etags and 'etag' in response_headers: + logger.debug('Caching due to etag') + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body=body), + ) + + # Add to the cache any 301s. We do this before looking that + # the Date headers. + elif response.status == 301: + logger.debug('Caching permanant redirect') + self.cache.set( + cache_url, + self.serializer.dumps(request, response) + ) + + # Add to the cache if the response headers demand it. If there + # is no date header then we can't do anything about expiring + # the cache. 
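# Illustrative sketch, not from the vendored file: the dictionary shape that
# parse_cache_control() above produces -- directives with an argument map to
# that argument, bare directives map to 1. The header string is a made-up example.
header = "max-age=600, no-store, must-revalidate"
parts = [p.strip().lower() for p in header.split(",")]
directives = dict(
    tuple(p.split("=", 1)) if "=" in p else (p, 1)
    for p in parts
)
print(directives)   # {'max-age': '600', 'no-store': 1, 'must-revalidate': 1}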
+ elif 'date' in response_headers: + # cache when there is a max-age > 0 + if cc and cc.get('max-age'): + if cc['max-age'].isdigit() and int(cc['max-age']) > 0: + logger.debug('Caching b/c date exists and max-age > 0') + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body=body), + ) + + # If the request can expire, it means we should cache it + # in the meantime. + elif 'expires' in response_headers: + if response_headers['expires']: + logger.debug('Caching b/c of expires header') + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body=body), + ) + + def update_cached_response(self, request, response): + """On a 304 we will get a new set of headers that we want to + update our cached value with, assuming we have one. + + This should only ever be called when we've sent an ETag and + gotten a 304 as the response. + """ + cache_url = self.cache_url(request.url) + + cached_response = self.serializer.loads( + request, + self.cache.get(cache_url) + ) + + if not cached_response: + # we didn't have a cached response + return response + + # Lets update our headers with the headers from the new request: + # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 + # + # The server isn't supposed to send headers that would make + # the cached body invalid. But... just in case, we'll be sure + # to strip out ones we know that might be problmatic due to + # typical assumptions. + excluded_headers = [ + "content-length", + ] + + cached_response.headers.update( + dict((k, v) for k, v in response.headers.items() + if k.lower() not in excluded_headers) + ) + + # we want a 200 b/c we have content via the cache + cached_response.status = 200 + + # update our cache + self.cache.set( + cache_url, + self.serializer.dumps(request, cached_response), + ) + + return cached_response diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/filewrapper.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/filewrapper.py new file mode 100644 index 0000000..f1e1ce0 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/filewrapper.py @@ -0,0 +1,78 @@ +from io import BytesIO + + +class CallbackFileWrapper(object): + """ + Small wrapper around a fp object which will tee everything read into a + buffer, and when that file is closed it will execute a callback with the + contents of that buffer. + + All attributes are proxied to the underlying file object. + + This class uses members with a double underscore (__) leading prefix so as + not to accidentally shadow an attribute. + """ + + def __init__(self, fp, callback): + self.__buf = BytesIO() + self.__fp = fp + self.__callback = callback + + def __getattr__(self, name): + # The vaguaries of garbage collection means that self.__fp is + # not always set. By using __getattribute__ and the private + # name[0] allows looking up the attribute value and raising an + # AttributeError when it doesn't exist. This stop thigns from + # infinitely recursing calls to getattr in the case where + # self.__fp hasn't been set. + # + # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers + fp = self.__getattribute__('_CallbackFileWrapper__fp') + return getattr(fp, name) + + def __is_fp_closed(self): + try: + return self.__fp.fp is None + except AttributeError: + pass + + try: + return self.__fp.closed + except AttributeError: + pass + + # We just don't cache it then. + # TODO: Add some logging here... 
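# Illustrative sketch, not from the vendored file: the "tee" idea behind
# CallbackFileWrapper -- every chunk handed to the caller is also copied into a
# side buffer, and a callback sees the complete body once the source is
# exhausted. BytesIO stands in for a real HTTP response here.
from io import BytesIO

source = BytesIO(b"response body")
buffered = BytesIO()

def on_complete(body):
    print("would cache %d bytes" % len(body))

while True:
    chunk = source.read(4)
    buffered.write(chunk)
    if not chunk:                        # nothing left -> fire the callback once
        on_complete(buffered.getvalue())
        break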
+ return False + + def _close(self): + if self.__callback: + self.__callback(self.__buf.getvalue()) + + # We assign this to None here, because otherwise we can get into + # really tricky problems where the CPython interpreter dead locks + # because the callback is holding a reference to something which + # has a __del__ method. Setting this to None breaks the cycle + # and allows the garbage collector to do it's thing normally. + self.__callback = None + + def read(self, amt=None): + data = self.__fp.read(amt) + self.__buf.write(data) + if self.__is_fp_closed(): + self._close() + + return data + + def _safe_read(self, amt): + data = self.__fp._safe_read(amt) + if amt == 2 and data == b'\r\n': + # urllib executes this read to toss the CRLF at the end + # of the chunk. + return data + + self.__buf.write(data) + if self.__is_fp_closed(): + self._close() + + return data diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/heuristics.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/heuristics.py new file mode 100644 index 0000000..94715a4 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/heuristics.py @@ -0,0 +1,138 @@ +import calendar +import time + +from email.utils import formatdate, parsedate, parsedate_tz + +from datetime import datetime, timedelta + +TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" + + +def expire_after(delta, date=None): + date = date or datetime.now() + return date + delta + + +def datetime_to_header(dt): + return formatdate(calendar.timegm(dt.timetuple())) + + +class BaseHeuristic(object): + + def warning(self, response): + """ + Return a valid 1xx warning header value describing the cache + adjustments. + + The response is provided too allow warnings like 113 + http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need + to explicitly say response is over 24 hours old. + """ + return '110 - "Response is Stale"' + + def update_headers(self, response): + """Update the response headers with any new headers. + + NOTE: This SHOULD always include some Warning header to + signify that the response was cached by the client, not + by way of the provided headers. + """ + return {} + + def apply(self, response): + updated_headers = self.update_headers(response) + + if updated_headers: + response.headers.update(updated_headers) + warning_header_value = self.warning(response) + if warning_header_value is not None: + response.headers.update({'Warning': warning_header_value}) + + return response + + +class OneDayCache(BaseHeuristic): + """ + Cache the response by providing an expires 1 day in the + future. + """ + def update_headers(self, response): + headers = {} + + if 'expires' not in response.headers: + date = parsedate(response.headers['date']) + expires = expire_after(timedelta(days=1), + date=datetime(*date[:6])) + headers['expires'] = datetime_to_header(expires) + headers['cache-control'] = 'public' + return headers + + +class ExpiresAfter(BaseHeuristic): + """ + Cache **all** requests for a defined time period. + """ + + def __init__(self, **kw): + self.delta = timedelta(**kw) + + def update_headers(self, response): + expires = expire_after(self.delta) + return { + 'expires': datetime_to_header(expires), + 'cache-control': 'public', + } + + def warning(self, response): + tmpl = '110 - Automatically cached for %s. 
Response might be stale' + return tmpl % self.delta + + +class LastModified(BaseHeuristic): + """ + If there is no Expires header already, fall back on Last-Modified + using the heuristic from + http://tools.ietf.org/html/rfc7234#section-4.2.2 + to calculate a reasonable value. + + Firefox also does something like this per + https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ + http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 + Unlike mozilla we limit this to 24-hr. + """ + cacheable_by_default_statuses = set([ + 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + ]) + + def update_headers(self, resp): + headers = resp.headers + + if 'expires' in headers: + return {} + + if 'cache-control' in headers and headers['cache-control'] != 'public': + return {} + + if resp.status not in self.cacheable_by_default_statuses: + return {} + + if 'date' not in headers or 'last-modified' not in headers: + return {} + + date = calendar.timegm(parsedate_tz(headers['date'])) + last_modified = parsedate(headers['last-modified']) + if date is None or last_modified is None: + return {} + + now = time.time() + current_age = max(0, now - date) + delta = date - calendar.timegm(last_modified) + freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) + if freshness_lifetime <= current_age: + return {} + + expires = date + freshness_lifetime + return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))} + + def warning(self, resp): + return None diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/serialize.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 0000000..8f9c589 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/serialize.py @@ -0,0 +1,196 @@ +import base64 +import io +import json +import zlib + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .compat import HTTPResponse, pickle, text_type + + +def _b64_encode_bytes(b): + return base64.b64encode(b).decode("ascii") + + +def _b64_encode_str(s): + return _b64_encode_bytes(s.encode("utf8")) + + +def _b64_encode(s): + if isinstance(s, text_type): + return _b64_encode_str(s) + return _b64_encode_bytes(s) + + +def _b64_decode_bytes(b): + return base64.b64decode(b.encode("ascii")) + + +def _b64_decode_str(s): + return _b64_decode_bytes(s).decode("utf8") + + +class Serializer(object): + + def dumps(self, request, response, body=None): + response_headers = CaseInsensitiveDict(response.headers) + + if body is None: + body = response.read(decode_content=False) + + # NOTE: 99% sure this is dead code. I'm only leaving it + # here b/c I don't have a test yet to prove + # it. Basically, before using + # `cachecontrol.filewrapper.CallbackFileWrapper`, + # this made an effort to reset the file handle. The + # `CallbackFileWrapper` short circuits this code by + # setting the body as the content is consumed, the + # result being a `body` argument is *always* passed + # into cache_response, and in turn, + # `Serializer.dump`. 
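# Illustrative sketch, not from the vendored file: the cache-entry layout that
# dumps() assembles just below and loads() later splits apart -- a b"cc=2"
# version tag, a comma, then zlib-compressed JSON. The payload is a toy
# stand-in for the real response dictionary.
import json
import zlib

payload = {"response": {"status": 200}, "vary": {}}
blob = b",".join([b"cc=2", zlib.compress(json.dumps(payload).encode("utf8"))])

ver, data = blob.split(b",", 1)          # first comma separates tag from body
assert ver == b"cc=2"
print(json.loads(zlib.decompress(data).decode("utf8")))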
+ response._fp = io.BytesIO(body) + + data = { + "response": { + "body": _b64_encode_bytes(body), + "headers": dict( + (_b64_encode(k), _b64_encode(v)) + for k, v in response.headers.items() + ), + "status": response.status, + "version": response.version, + "reason": _b64_encode_str(response.reason), + "strict": response.strict, + "decode_content": response.decode_content, + }, + } + + # Construct our vary headers + data["vary"] = {} + if "vary" in response_headers: + varied_headers = response_headers['vary'].split(',') + for header in varied_headers: + header = header.strip() + data["vary"][header] = request.headers.get(header, None) + + # Encode our Vary headers to ensure they can be serialized as JSON + data["vary"] = dict( + (_b64_encode(k), _b64_encode(v) if v is not None else v) + for k, v in data["vary"].items() + ) + + return b",".join([ + b"cc=2", + zlib.compress( + json.dumps( + data, separators=(",", ":"), sort_keys=True, + ).encode("utf8"), + ), + ]) + + def loads(self, request, data): + # Short circuit if we've been given an empty set of data + if not data: + return + + # Determine what version of the serializer the data was serialized + # with + try: + ver, data = data.split(b",", 1) + except ValueError: + ver = b"cc=0" + + # Make sure that our "ver" is actually a version and isn't a false + # positive from a , being in the data stream. + if ver[:3] != b"cc=": + data = ver + data + ver = b"cc=0" + + # Get the version number out of the cc=N + ver = ver.split(b"=", 1)[-1].decode("ascii") + + # Dispatch to the actual load method for the given version + try: + return getattr(self, "_loads_v{0}".format(ver))(request, data) + except AttributeError: + # This is a version we don't have a loads function for, so we'll + # just treat it as a miss and return None + return + + def prepare_response(self, request, cached): + """Verify our vary headers match and construct a real urllib3 + HTTPResponse object. + """ + # Special case the '*' Vary value as it means we cannot actually + # determine if the cached response is suitable for this request. + if "*" in cached.get("vary", {}): + return + + # Ensure that the Vary headers for the cached response match our + # request + for header, value in cached.get("vary", {}).items(): + if request.headers.get(header, None) != value: + return + + body_raw = cached["response"].pop("body") + + headers = CaseInsensitiveDict(data=cached['response']['headers']) + if headers.get('transfer-encoding', '') == 'chunked': + headers.pop('transfer-encoding') + + cached['response']['headers'] = headers + + try: + body = io.BytesIO(body_raw) + except TypeError: + # This can happen if cachecontrol serialized to v1 format (pickle) + # using Python 2. A Python 2 str(byte string) will be unpickled as + # a Python 3 str (unicode string), which will cause the above to + # fail with: + # + # TypeError: 'str' does not support the buffer interface + body = io.BytesIO(body_raw.encode('utf8')) + + return HTTPResponse( + body=body, + preload_content=False, + **cached["response"] + ) + + def _loads_v0(self, request, data): + # The original legacy cache data. This doesn't contain enough + # information to construct everything we need, so we'll treat this as + # a miss. 
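# Illustrative sketch, not from the vendored file: the version dispatch used by
# loads() above -- the "cc=N" prefix picks a _loads_vN method by name, and an
# unknown version falls through to a cache miss (None).
class MiniLoader:
    def _loads_v2(self, data):
        return "decoded with v2: %r" % data

    def loads(self, blob):
        ver, data = blob.split(b",", 1)
        ver = ver.split(b"=", 1)[-1].decode("ascii")
        try:
            return getattr(self, "_loads_v%s" % ver)(data)
        except AttributeError:
            return None                  # no loader for this version

print(MiniLoader().loads(b"cc=2,payload"))   # decoded with v2: b'payload'
print(MiniLoader().loads(b"cc=9,payload"))   # None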
+ return + + def _loads_v1(self, request, data): + try: + cached = pickle.loads(data) + except ValueError: + return + + return self.prepare_response(request, cached) + + def _loads_v2(self, request, data): + try: + cached = json.loads(zlib.decompress(data).decode("utf8")) + except ValueError: + return + + # We need to decode the items that we've base64 encoded + cached["response"]["body"] = _b64_decode_bytes( + cached["response"]["body"] + ) + cached["response"]["headers"] = dict( + (_b64_decode_str(k), _b64_decode_str(v)) + for k, v in cached["response"]["headers"].items() + ) + cached["response"]["reason"] = _b64_decode_str( + cached["response"]["reason"], + ) + cached["vary"] = dict( + (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) + for k, v in cached["vary"].items() + ) + + return self.prepare_response(request, cached) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/wrapper.py b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 0000000..ea421aa --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/cachecontrol/wrapper.py @@ -0,0 +1,21 @@ +from .adapter import CacheControlAdapter +from .cache import DictCache + + +def CacheControl(sess, + cache=None, + cache_etags=True, + serializer=None, + heuristic=None): + + cache = cache or DictCache() + adapter = CacheControlAdapter( + cache, + cache_etags=cache_etags, + serializer=serializer, + heuristic=heuristic, + ) + sess.mount('http://', adapter) + sess.mount('https://', adapter) + + return sess diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/__init__.py new file mode 100644 index 0000000..670e6b3 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/__init__.py @@ -0,0 +1,7 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from .initialise import init, deinit, reinit, colorama_text +from .ansi import Fore, Back, Style, Cursor +from .ansitowin32 import AnsiToWin32 + +__version__ = '0.3.7' + diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/ansi.py b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/ansi.py new file mode 100644 index 0000000..7877658 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/ansi.py @@ -0,0 +1,102 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +''' +This module generates ANSI character codes to printing colors to terminals. +See: http://en.wikipedia.org/wiki/ANSI_escape_code +''' + +CSI = '\033[' +OSC = '\033]' +BEL = '\007' + + +def code_to_chars(code): + return CSI + str(code) + 'm' + +def set_title(title): + return OSC + '2;' + title + BEL + +def clear_screen(mode=2): + return CSI + str(mode) + 'J' + +def clear_line(mode=2): + return CSI + str(mode) + 'K' + + +class AnsiCodes(object): + def __init__(self): + # the subclasses declare class attributes which are numbers. 
+ # Upon instantiation we define instance attributes, which are the same + # as the class attributes but wrapped with the ANSI escape sequence + for name in dir(self): + if not name.startswith('_'): + value = getattr(self, name) + setattr(self, name, code_to_chars(value)) + + +class AnsiCursor(object): + def UP(self, n=1): + return CSI + str(n) + 'A' + def DOWN(self, n=1): + return CSI + str(n) + 'B' + def FORWARD(self, n=1): + return CSI + str(n) + 'C' + def BACK(self, n=1): + return CSI + str(n) + 'D' + def POS(self, x=1, y=1): + return CSI + str(y) + ';' + str(x) + 'H' + + +class AnsiFore(AnsiCodes): + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 + MAGENTA = 35 + CYAN = 36 + WHITE = 37 + RESET = 39 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 90 + LIGHTRED_EX = 91 + LIGHTGREEN_EX = 92 + LIGHTYELLOW_EX = 93 + LIGHTBLUE_EX = 94 + LIGHTMAGENTA_EX = 95 + LIGHTCYAN_EX = 96 + LIGHTWHITE_EX = 97 + + +class AnsiBack(AnsiCodes): + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 + MAGENTA = 45 + CYAN = 46 + WHITE = 47 + RESET = 49 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 100 + LIGHTRED_EX = 101 + LIGHTGREEN_EX = 102 + LIGHTYELLOW_EX = 103 + LIGHTBLUE_EX = 104 + LIGHTMAGENTA_EX = 105 + LIGHTCYAN_EX = 106 + LIGHTWHITE_EX = 107 + + +class AnsiStyle(AnsiCodes): + BRIGHT = 1 + DIM = 2 + NORMAL = 22 + RESET_ALL = 0 + +Fore = AnsiFore() +Back = AnsiBack() +Style = AnsiStyle() +Cursor = AnsiCursor() diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/ansitowin32.py b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/ansitowin32.py new file mode 100644 index 0000000..b7ff6f2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/ansitowin32.py @@ -0,0 +1,236 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import re +import sys +import os + +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style +from .winterm import WinTerm, WinColor, WinStyle +from .win32 import windll, winapi_test + + +winterm = None +if windll is not None: + winterm = WinTerm() + + +def is_stream_closed(stream): + return not hasattr(stream, 'closed') or stream.closed + + +def is_a_tty(stream): + return hasattr(stream, 'isatty') and stream.isatty() + + +class StreamWrapper(object): + ''' + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()', which is delegated to our + Converter instance. + ''' + def __init__(self, wrapped, converter): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. + self.__wrapped = wrapped + self.__convertor = converter + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def write(self, text): + self.__convertor.write(text) + + +class AnsiToWin32(object): + ''' + Implements a 'write()' method which, on Windows, will strip ANSI character + sequences from the text, and if outputting to a tty, will convert them into + win32 function calls. 
+ ''' + ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer + ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command + + def __init__(self, wrapped, convert=None, strip=None, autoreset=False): + # The wrapped stream (normally sys.stdout or sys.stderr) + self.wrapped = wrapped + + # should we reset colors to defaults after every .write() + self.autoreset = autoreset + + # create the proxy wrapping our output stream + self.stream = StreamWrapper(wrapped, self) + + on_windows = os.name == 'nt' + # We test if the WinAPI works, because even if we are on Windows + # we may be using a terminal that doesn't support the WinAPI + # (e.g. Cygwin Terminal). In this case it's up to the terminal + # to support the ANSI codes. + conversion_supported = on_windows and winapi_test() + + # should we strip ANSI sequences from our output? + if strip is None: + strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped)) + self.strip = strip + + # should we should convert ANSI sequences into win32 calls? + if convert is None: + convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped) + self.convert = convert + + # dict of ansi codes to win32 functions and parameters + self.win32_calls = self.get_win32_calls() + + # are we wrapping stderr? + self.on_stderr = self.wrapped is sys.stderr + + def should_wrap(self): + ''' + True if this class is actually needed. If false, then the output + stream will not be affected, nor will win32 calls be issued, so + wrapping stdout is not actually required. This will generally be + False on non-Windows platforms, unless optional functionality like + autoreset has been requested using kwargs to init() + ''' + return self.convert or self.strip or self.autoreset + + def get_win32_calls(self): + if self.convert and winterm: + return { + AnsiStyle.RESET_ALL: (winterm.reset_all, ), + AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), + AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), + AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), + AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), + AnsiFore.RED: (winterm.fore, WinColor.RED), + AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), + AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), + AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), + AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), + AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), + AnsiFore.WHITE: (winterm.fore, WinColor.GREY), + AnsiFore.RESET: (winterm.fore, ), + AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), + AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), + AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), + AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), + AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), + AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), + AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), + AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), + AnsiBack.BLACK: (winterm.back, WinColor.BLACK), + AnsiBack.RED: (winterm.back, WinColor.RED), + AnsiBack.GREEN: (winterm.back, WinColor.GREEN), + AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), + AnsiBack.BLUE: (winterm.back, WinColor.BLUE), + AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), + AnsiBack.CYAN: (winterm.back, WinColor.CYAN), + AnsiBack.WHITE: (winterm.back, WinColor.GREY), + AnsiBack.RESET: (winterm.back, ), + AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), + 
AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), + AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), + AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), + AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), + AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), + AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), + AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), + } + return dict() + + def write(self, text): + if self.strip or self.convert: + self.write_and_convert(text) + else: + self.wrapped.write(text) + self.wrapped.flush() + if self.autoreset: + self.reset_all() + + + def reset_all(self): + if self.convert: + self.call_win32('m', (0,)) + elif not self.strip and not is_stream_closed(self.wrapped): + self.wrapped.write(Style.RESET_ALL) + + + def write_and_convert(self, text): + ''' + Write the given text to our wrapped stream, stripping any ANSI + sequences from the text, and optionally converting them into win32 + calls. + ''' + cursor = 0 + text = self.convert_osc(text) + for match in self.ANSI_CSI_RE.finditer(text): + start, end = match.span() + self.write_plain_text(text, cursor, start) + self.convert_ansi(*match.groups()) + cursor = end + self.write_plain_text(text, cursor, len(text)) + + + def write_plain_text(self, text, start, end): + if start < end: + self.wrapped.write(text[start:end]) + self.wrapped.flush() + + + def convert_ansi(self, paramstring, command): + if self.convert: + params = self.extract_params(command, paramstring) + self.call_win32(command, params) + + + def extract_params(self, command, paramstring): + if command in 'Hf': + params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) + while len(params) < 2: + # defaults: + params = params + (1,) + else: + params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) + if len(params) == 0: + # defaults: + if command in 'JKm': + params = (0,) + elif command in 'ABCD': + params = (1,) + + return params + + + def call_win32(self, command, params): + if command == 'm': + for param in params: + if param in self.win32_calls: + func_args = self.win32_calls[param] + func = func_args[0] + args = func_args[1:] + kwargs = dict(on_stderr=self.on_stderr) + func(*args, **kwargs) + elif command in 'J': + winterm.erase_screen(params[0], on_stderr=self.on_stderr) + elif command in 'K': + winterm.erase_line(params[0], on_stderr=self.on_stderr) + elif command in 'Hf': # cursor position - absolute + winterm.set_cursor_position(params, on_stderr=self.on_stderr) + elif command in 'ABCD': # cursor position - relative + n = params[0] + # A - up, B - down, C - forward, D - back + x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] + winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) + + + def convert_osc(self, text): + for match in self.ANSI_OSC_RE.finditer(text): + start, end = match.span() + text = text[:start] + text[end:] + paramstring, command = match.groups() + if command in '\x07': # \x07 = BEL + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) + return text diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/initialise.py b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/initialise.py new file mode 100644 index 0000000..834962a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/initialise.py @@ -0,0 +1,82 @@ +# Copyright Jonathan 
Hartley 2013. BSD 3-Clause license, see LICENSE file. +import atexit +import contextlib +import sys + +from .ansitowin32 import AnsiToWin32 + + +orig_stdout = None +orig_stderr = None + +wrapped_stdout = None +wrapped_stderr = None + +atexit_done = False + + +def reset_all(): + if AnsiToWin32 is not None: # Issue #74: objects might become None at exit + AnsiToWin32(orig_stdout).reset_all() + + +def init(autoreset=False, convert=None, strip=None, wrap=True): + + if not wrap and any([autoreset, convert, strip]): + raise ValueError('wrap=False conflicts with any other arg=True') + + global wrapped_stdout, wrapped_stderr + global orig_stdout, orig_stderr + + orig_stdout = sys.stdout + orig_stderr = sys.stderr + + if sys.stdout is None: + wrapped_stdout = None + else: + sys.stdout = wrapped_stdout = \ + wrap_stream(orig_stdout, convert, strip, autoreset, wrap) + if sys.stderr is None: + wrapped_stderr = None + else: + sys.stderr = wrapped_stderr = \ + wrap_stream(orig_stderr, convert, strip, autoreset, wrap) + + global atexit_done + if not atexit_done: + atexit.register(reset_all) + atexit_done = True + + +def deinit(): + if orig_stdout is not None: + sys.stdout = orig_stdout + if orig_stderr is not None: + sys.stderr = orig_stderr + + +@contextlib.contextmanager +def colorama_text(*args, **kwargs): + init(*args, **kwargs) + try: + yield + finally: + deinit() + + +def reinit(): + if wrapped_stdout is not None: + sys.stdout = wrapped_stdout + if wrapped_stderr is not None: + sys.stderr = wrapped_stderr + + +def wrap_stream(stream, convert, strip, autoreset, wrap): + if wrap: + wrapper = AnsiToWin32(stream, + convert=convert, strip=strip, autoreset=autoreset) + if wrapper.should_wrap(): + stream = wrapper.stream + return stream + + diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/win32.py b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/win32.py new file mode 100644 index 0000000..3d1d2f2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/win32.py @@ -0,0 +1,154 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
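# Illustrative sketch, not part of the vendored package: how the initialise
# module above is normally driven -- init() swaps sys.stdout/stderr for
# AnsiToWin32 proxies and deinit() restores them; colorama_text() does both.
# Shown against a top-level colorama install; the copy in this patch lives
# under pip._vendor.colorama.
from colorama import Fore, Style, colorama_text, deinit, init

init(autoreset=False)                        # wrap the standard streams
print(Fore.GREEN + 'ok' + Style.RESET_ALL)   # converted or stripped as needed
deinit()                                     # restore the original streams

with colorama_text():                        # context-manager form of the same thing
    print(Fore.CYAN + 'hello')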
+ +# from winbase.h +STDOUT = -11 +STDERR = -12 + +try: + import ctypes + from ctypes import LibraryLoader + windll = LibraryLoader(ctypes.WinDLL) + from ctypes import wintypes +except (AttributeError, ImportError): + windll = None + SetConsoleTextAttribute = lambda *_: None + winapi_test = lambda *_: None +else: + from ctypes import byref, Structure, c_char, POINTER + + COORD = wintypes._COORD + + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + """struct in wincon.h.""" + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + def __str__(self): + return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( + self.dwSize.Y, self.dwSize.X + , self.dwCursorPosition.Y, self.dwCursorPosition.X + , self.wAttributes + , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right + , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X + ) + + _GetStdHandle = windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [ + wintypes.DWORD, + ] + _GetStdHandle.restype = wintypes.HANDLE + + _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute + _SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + ] + _SetConsoleTextAttribute.restype = wintypes.BOOL + + _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition + _SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + COORD, + ] + _SetConsoleCursorPosition.restype = wintypes.BOOL + + _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA + _FillConsoleOutputCharacterA.argtypes = [ + wintypes.HANDLE, + c_char, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputCharacterA.restype = wintypes.BOOL + + _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute + _FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputAttribute.restype = wintypes.BOOL + + _SetConsoleTitleW = windll.kernel32.SetConsoleTitleA + _SetConsoleTitleW.argtypes = [ + wintypes.LPCSTR + ] + _SetConsoleTitleW.restype = wintypes.BOOL + + handles = { + STDOUT: _GetStdHandle(STDOUT), + STDERR: _GetStdHandle(STDERR), + } + + def winapi_test(): + handle = handles[STDOUT] + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return bool(success) + + def GetConsoleScreenBufferInfo(stream_id=STDOUT): + handle = handles[stream_id] + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return csbi + + def SetConsoleTextAttribute(stream_id, attrs): + handle = handles[stream_id] + return _SetConsoleTextAttribute(handle, attrs) + + def SetConsoleCursorPosition(stream_id, position, adjust=True): + position = COORD(*position) + # If the position is out of range, do nothing. + if position.Y <= 0 or position.X <= 0: + return + # Adjust for Windows' SetConsoleCursorPosition: + # 1. being 0-based, while ANSI is 1-based. + # 2. expecting (x,y), while ANSI uses (y,x). 
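# Illustrative sketch, not from the vendored file: the coordinate translation
# described in the comment above, as a standalone helper -- ANSI positions are
# 1-based (row, column) while the Win32 call wants 0-based (x, y).
def ansi_to_win32_coords(row, col):
    return (col - 1, row - 1)          # swap the axes and shift to 0-based

print(ansi_to_win32_coords(1, 1))      # (0, 0) -> top-left corner
print(ansi_to_win32_coords(5, 10))     # (9, 4)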
+ adjusted_position = COORD(position.Y - 1, position.X - 1) + if adjust: + # Adjust for viewport's scroll position + sr = GetConsoleScreenBufferInfo(STDOUT).srWindow + adjusted_position.Y += sr.Top + adjusted_position.X += sr.Left + # Resume normal processing + handle = handles[stream_id] + return _SetConsoleCursorPosition(handle, adjusted_position) + + def FillConsoleOutputCharacter(stream_id, char, length, start): + handle = handles[stream_id] + char = c_char(char.encode()) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + success = _FillConsoleOutputCharacterA( + handle, char, length, start, byref(num_written)) + return num_written.value + + def FillConsoleOutputAttribute(stream_id, attr, length, start): + ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' + handle = handles[stream_id] + attribute = wintypes.WORD(attr) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + return _FillConsoleOutputAttribute( + handle, attribute, length, start, byref(num_written)) + + def SetConsoleTitle(title): + return _SetConsoleTitleW(title) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/winterm.py b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/winterm.py new file mode 100644 index 0000000..60309d3 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/colorama/winterm.py @@ -0,0 +1,162 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from . import win32 + + +# from wincon.h +class WinColor(object): + BLACK = 0 + BLUE = 1 + GREEN = 2 + CYAN = 3 + RED = 4 + MAGENTA = 5 + YELLOW = 6 + GREY = 7 + +# from wincon.h +class WinStyle(object): + NORMAL = 0x00 # dim text, dim background + BRIGHT = 0x08 # bright text, dim background + BRIGHT_BACKGROUND = 0x80 # dim text, bright background + +class WinTerm(object): + + def __init__(self): + self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes + self.set_attrs(self._default) + self._default_fore = self._fore + self._default_back = self._back + self._default_style = self._style + # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. + # So that LIGHT_EX colors and BRIGHT style do not clobber each other, + # we track them separately, since LIGHT_EX is overwritten by Fore/Back + # and BRIGHT is overwritten by Style codes. 
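# Illustrative sketch, not from the vendored file: the bit layout that
# get_attrs()/set_attrs() just below rely on -- bits 0-2 hold the foreground
# colour, bits 4-6 the background, and 0x08/0x80 the two brightness flags.
RED, GREY = 4, 7                          # WinColor values from the class above
BRIGHT = 0x08                             # WinStyle.BRIGHT

attrs = RED + GREY * 16 + BRIGHT          # bright red text on a grey background
fore = attrs & 7                          # -> 4
back = (attrs >> 4) & 7                   # -> 7
style = attrs & (0x08 | 0x80)             # -> 0x08 (BRIGHT set)
print(fore, back, hex(style))             # 4 7 0x8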
+ self._light = 0 + + def get_attrs(self): + return self._fore + self._back * 16 + (self._style | self._light) + + def set_attrs(self, value): + self._fore = value & 7 + self._back = (value >> 4) & 7 + self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) + + def reset_all(self, on_stderr=None): + self.set_attrs(self._default) + self.set_console(attrs=self._default) + + def fore(self, fore=None, light=False, on_stderr=False): + if fore is None: + fore = self._default_fore + self._fore = fore + # Emulate LIGHT_EX with BRIGHT Style + if light: + self._light |= WinStyle.BRIGHT + else: + self._light &= ~WinStyle.BRIGHT + self.set_console(on_stderr=on_stderr) + + def back(self, back=None, light=False, on_stderr=False): + if back is None: + back = self._default_back + self._back = back + # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style + if light: + self._light |= WinStyle.BRIGHT_BACKGROUND + else: + self._light &= ~WinStyle.BRIGHT_BACKGROUND + self.set_console(on_stderr=on_stderr) + + def style(self, style=None, on_stderr=False): + if style is None: + style = self._default_style + self._style = style + self.set_console(on_stderr=on_stderr) + + def set_console(self, attrs=None, on_stderr=False): + if attrs is None: + attrs = self.get_attrs() + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleTextAttribute(handle, attrs) + + def get_position(self, handle): + position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition + # Because Windows coordinates are 0-based, + # and win32.SetConsoleCursorPosition expects 1-based. + position.X += 1 + position.Y += 1 + return position + + def set_cursor_position(self, position=None, on_stderr=False): + if position is None: + # I'm not currently tracking the position, so there is no default. + # position = self.get_position() + return + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleCursorPosition(handle, position) + + def cursor_adjust(self, x, y, on_stderr=False): + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + position = self.get_position(handle) + adjusted_position = (position.Y + y, position.X + x) + win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) + + def erase_screen(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the screen. + # 1 should clear from the cursor to the beginning of the screen. 
+ # 2 should clear the entire screen, and move cursor to (1,1) + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + # get the number of character cells in the current buffer + cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y + # get number of character cells before current cursor position + cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = cells_in_screen - cells_before_cursor + if mode == 1: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_before_cursor + elif mode == 2: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_in_screen + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + if mode == 2: + # put the cursor where needed + win32.SetConsoleCursorPosition(handle, (1, 1)) + + def erase_line(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the line. + # 1 should clear from the cursor to the beginning of the line. + # 2 should clear the entire line. + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X + if mode == 1: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwCursorPosition.X + elif mode == 2: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwSize.X + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + + def set_title(self, title): + win32.SetConsoleTitle(title) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/__init__.py new file mode 100644 index 0000000..d186b0a --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/__init__.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2016 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import logging + +__version__ = '0.2.4' + +class DistlibException(Exception): + pass + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): + def handle(self, record): pass + def emit(self, record): pass + def createLock(self): self.lock = None + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/__init__.py new file mode 100644 index 0000000..f7dbf4c --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/__init__.py @@ -0,0 +1,6 @@ +"""Modules copied from Python 3 standard libraries, for internal use only. + +Individual classes and functions are found in d2._backport.misc. Intended +usage is to always import things missing from 3.1 from that module: the +built-in/stdlib objects will be used if found. 
+""" diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/misc.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/misc.py new file mode 100644 index 0000000..cfb318d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/misc.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Backports for individual classes and functions.""" + +import os +import sys + +__all__ = ['cache_from_source', 'callable', 'fsencode'] + + +try: + from imp import cache_from_source +except ImportError: + def cache_from_source(py_file, debug=__debug__): + ext = debug and 'c' or 'o' + return py_file + ext + + +try: + callable = callable +except NameError: + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode +except AttributeError: + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, str): + return filename.encode(sys.getfilesystemencoding()) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/shutil.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/shutil.py new file mode 100644 index 0000000..159e49e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/shutil.py @@ -0,0 +1,761 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Utility functions for copying and archiving files and directory trees. + +XXX The functions here don't copy the resource fork or other metadata on Mac. + +""" + +import os +import sys +import stat +from os.path import abspath +import fnmatch +import collections +import errno +from . import tarfile + +try: + import bz2 + _BZ2_SUPPORTED = True +except ImportError: + _BZ2_SUPPORTED = False + +try: + from pwd import getpwnam +except ImportError: + getpwnam = None + +try: + from grp import getgrnam +except ImportError: + getgrnam = None + +__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", + "copytree", "move", "rmtree", "Error", "SpecialFileError", + "ExecError", "make_archive", "get_archive_formats", + "register_archive_format", "unregister_archive_format", + "get_unpack_formats", "register_unpack_format", + "unregister_unpack_format", "unpack_archive", "ignore_patterns"] + +class Error(EnvironmentError): + pass + +class SpecialFileError(EnvironmentError): + """Raised when trying to do a kind of operation (e.g. copying) which is + not supported on a special file (e.g. a named pipe)""" + +class ExecError(EnvironmentError): + """Raised when a command could not be executed""" + +class ReadError(EnvironmentError): + """Raised when an archive cannot be read""" + +class RegistryError(Exception): + """Raised when a registry operation with the archiving + and unpacking registries fails""" + + +try: + WindowsError +except NameError: + WindowsError = None + +def copyfileobj(fsrc, fdst, length=16*1024): + """copy data from file-like object fsrc to file-like object fdst""" + while 1: + buf = fsrc.read(length) + if not buf: + break + fdst.write(buf) + +def _samefile(src, dst): + # Macintosh, Unix. + if hasattr(os.path, 'samefile'): + try: + return os.path.samefile(src, dst) + except OSError: + return False + + # All other platforms: check for same pathname. 
+ return (os.path.normcase(os.path.abspath(src)) == + os.path.normcase(os.path.abspath(dst))) + +def copyfile(src, dst): + """Copy data from src to dst""" + if _samefile(src, dst): + raise Error("`%s` and `%s` are the same file" % (src, dst)) + + for fn in [src, dst]: + try: + st = os.stat(fn) + except OSError: + # File most likely does not exist + pass + else: + # XXX What about other special files? (sockets, devices...) + if stat.S_ISFIFO(st.st_mode): + raise SpecialFileError("`%s` is a named pipe" % fn) + + with open(src, 'rb') as fsrc: + with open(dst, 'wb') as fdst: + copyfileobj(fsrc, fdst) + +def copymode(src, dst): + """Copy mode bits from src to dst""" + if hasattr(os, 'chmod'): + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + os.chmod(dst, mode) + +def copystat(src, dst): + """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + if hasattr(os, 'utime'): + os.utime(dst, (st.st_atime, st.st_mtime)) + if hasattr(os, 'chmod'): + os.chmod(dst, mode) + if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): + try: + os.chflags(dst, st.st_flags) + except OSError as why: + if (not hasattr(errno, 'EOPNOTSUPP') or + why.errno != errno.EOPNOTSUPP): + raise + +def copy(src, dst): + """Copy data and mode bits ("cp src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copymode(src, dst) + +def copy2(src, dst): + """Copy data and all stat info ("cp -p src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copystat(src, dst) + +def ignore_patterns(*patterns): + """Function that can be used as copytree() ignore parameter. + + Patterns is a sequence of glob-style patterns + that are used to exclude files""" + def _ignore_patterns(path, names): + ignored_names = [] + for pattern in patterns: + ignored_names.extend(fnmatch.filter(names, pattern)) + return set(ignored_names) + return _ignore_patterns + +def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, + ignore_dangling_symlinks=False): + """Recursively copy a directory tree. + + The destination directory must not already exist. + If exception(s) occur, an Error is raised with a list of reasons. + + If the optional symlinks flag is true, symbolic links in the + source tree result in symbolic links in the destination tree; if + it is false, the contents of the files pointed to by symbolic + links are copied. If the file pointed by the symlink doesn't + exist, an exception will be added in the list of errors raised in + an Error exception at the end of the copy process. + + You can set the optional ignore_dangling_symlinks flag to true if you + want to silence this exception. Notice that this has no effect on + platforms that don't support os.symlink. + + The optional ignore argument is a callable. If given, it + is called with the `src` parameter, which is the directory + being visited by copytree(), and `names` which is the list of + `src` contents, as returned by os.listdir(): + + callable(src, names) -> ignored_names + + Since copytree() is called recursively, the callable will be + called once for each directory that is copied. It returns a + list of names relative to the `src` directory that should + not be copied. + + The optional copy_function argument is a callable that will be used + to copy each file. 
It will be called with the source path and the + destination path as arguments. By default, copy2() is used, but any + function that supports the same signature (like copy()) can be used. + + """ + names = os.listdir(src) + if ignore is not None: + ignored_names = ignore(src, names) + else: + ignored_names = set() + + os.makedirs(dst) + errors = [] + for name in names: + if name in ignored_names: + continue + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if os.path.islink(srcname): + linkto = os.readlink(srcname) + if symlinks: + os.symlink(linkto, dstname) + else: + # ignore dangling symlink if the flag is on + if not os.path.exists(linkto) and ignore_dangling_symlinks: + continue + # otherwise let the copy occurs. copy2 will raise an error + copy_function(srcname, dstname) + elif os.path.isdir(srcname): + copytree(srcname, dstname, symlinks, ignore, copy_function) + else: + # Will raise a SpecialFileError for unsupported file types + copy_function(srcname, dstname) + # catch the Error from the recursive copytree so that we can + # continue with other files + except Error as err: + errors.extend(err.args[0]) + except EnvironmentError as why: + errors.append((srcname, dstname, str(why))) + try: + copystat(src, dst) + except OSError as why: + if WindowsError is not None and isinstance(why, WindowsError): + # Copying file access times may fail on Windows + pass + else: + errors.extend((src, dst, str(why))) + if errors: + raise Error(errors) + +def rmtree(path, ignore_errors=False, onerror=None): + """Recursively delete a directory tree. + + If ignore_errors is set, errors are ignored; otherwise, if onerror + is set, it is called to handle the error with arguments (func, + path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path is the argument to that function that caused it to fail; and + exc_info is a tuple returned by sys.exc_info(). If ignore_errors + is false and onerror is None, an exception is raised. + + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + try: + if os.path.islink(path): + # symlinks to directories are forbidden, see bug #1669 + raise OSError("Cannot call rmtree on a symbolic link") + except OSError: + onerror(os.path.islink, path, sys.exc_info()) + # can't continue even if onerror hook returns + return + names = [] + try: + names = os.listdir(path) + except os.error: + onerror(os.listdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + rmtree(fullname, ignore_errors, onerror) + else: + try: + os.remove(fullname) + except os.error: + onerror(os.remove, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + + +def _basename(path): + # A basename() variant which first strips the trailing slash, if present. + # Thus we always get the last component of the path, even for directories. + return os.path.basename(path.rstrip(os.path.sep)) + +def move(src, dst): + """Recursively move a file or directory to another location. This is + similar to the Unix "mv" command. + + If the destination is a directory or a symlink to a directory, the source + is moved inside the directory. The destination path must not already + exist. + + If the destination already exists but is not a directory, it may be + overwritten depending on os.rename() semantics. 
+ + If the destination is on our current filesystem, then rename() is used. + Otherwise, src is copied to the destination and then removed. + A lot more could be done here... A look at a mv.c shows a lot of + the issues this implementation glosses over. + + """ + real_dst = dst + if os.path.isdir(dst): + if _samefile(src, dst): + # We might be on a case insensitive filesystem, + # perform the rename anyway. + os.rename(src, dst) + return + + real_dst = os.path.join(dst, _basename(src)) + if os.path.exists(real_dst): + raise Error("Destination path '%s' already exists" % real_dst) + try: + os.rename(src, real_dst) + except OSError: + if os.path.isdir(src): + if _destinsrc(src, dst): + raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) + copytree(src, real_dst, symlinks=True) + rmtree(src) + else: + copy2(src, real_dst) + os.unlink(src) + +def _destinsrc(src, dst): + src = abspath(src) + dst = abspath(dst) + if not src.endswith(os.path.sep): + src += os.path.sep + if not dst.endswith(os.path.sep): + dst += os.path.sep + return dst.startswith(src) + +def _get_gid(name): + """Returns a gid, given a group name.""" + if getgrnam is None or name is None: + return None + try: + result = getgrnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _get_uid(name): + """Returns an uid, given a user name.""" + if getpwnam is None or name is None: + return None + try: + result = getpwnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, + owner=None, group=None, logger=None): + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", or None. + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_name' + ".tar", possibly plus + the appropriate compression extension (".gz", or ".bz2"). + + Returns the output filename. 
+ """ + tar_compression = {'gzip': 'gz', None: ''} + compress_ext = {'gzip': '.gz'} + + if _BZ2_SUPPORTED: + tar_compression['bzip2'] = 'bz2' + compress_ext['bzip2'] = '.bz2' + + # flags for compression program, each element of list will be an argument + if compress is not None and compress not in compress_ext: + raise ValueError("bad value for 'compress', or compression format not " + "supported : {0}".format(compress)) + + archive_name = base_name + '.tar' + compress_ext.get(compress, '') + archive_dir = os.path.dirname(archive_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # creating the tarball + if logger is not None: + logger.info('Creating tar archive') + + uid = _get_uid(owner) + gid = _get_gid(group) + + def _set_uid_gid(tarinfo): + if gid is not None: + tarinfo.gid = gid + tarinfo.gname = group + if uid is not None: + tarinfo.uid = uid + tarinfo.uname = owner + return tarinfo + + if not dry_run: + tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) + try: + tar.add(base_dir, filter=_set_uid_gid) + finally: + tar.close() + + return archive_name + +def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): + # XXX see if we want to keep an external call here + if verbose: + zipoptions = "-r" + else: + zipoptions = "-rq" + from distutils.errors import DistutilsExecError + from distutils.spawn import spawn + try: + spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) + except DistutilsExecError: + # XXX really should distinguish between "couldn't find + # external 'zip' command" and "zip failed". + raise ExecError("unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility") % zip_filename + +def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): + """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises ExecError. Returns the name of the output zip + file. + """ + zip_filename = base_name + ".zip" + archive_dir = os.path.dirname(base_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # If zipfile module is not available, try spawning an external 'zip' + # command. 
+ try: + import zipfile + except ImportError: + zipfile = None + + if zipfile is None: + _call_external_zip(base_dir, zip_filename, verbose, dry_run) + else: + if logger is not None: + logger.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) + + if not dry_run: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) + + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zip.write(path, path) + if logger is not None: + logger.info("adding '%s'", path) + zip.close() + + return zip_filename + +_ARCHIVE_FORMATS = { + 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), + 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (_make_zipfile, [], "ZIP file"), + } + +if _BZ2_SUPPORTED: + _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], + "bzip2'ed tar-file") + +def get_archive_formats(): + """Returns a list of supported formats for archiving and unarchiving. + + Each element of the returned sequence is a tuple (name, description) + """ + formats = [(name, registry[2]) for name, registry in + _ARCHIVE_FORMATS.items()] + formats.sort() + return formats + +def register_archive_format(name, function, extra_args=None, description=''): + """Registers an archive format. + + name is the name of the format. function is the callable that will be + used to create archives. If provided, extra_args is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_archive_formats() function. + """ + if extra_args is None: + extra_args = [] + if not isinstance(function, collections.Callable): + raise TypeError('The %s object is not callable' % function) + if not isinstance(extra_args, (tuple, list)): + raise TypeError('extra_args needs to be a sequence') + for element in extra_args: + if not isinstance(element, (tuple, list)) or len(element) !=2: + raise TypeError('extra_args elements are : (arg_name, value)') + + _ARCHIVE_FORMATS[name] = (function, extra_args, description) + +def unregister_archive_format(name): + del _ARCHIVE_FORMATS[name] + +def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, + dry_run=0, owner=None, group=None, logger=None): + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "bztar" + or "gztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. 
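+
+    A minimal usage sketch (illustrative only; 'backup' and the 'project' /
+    'src' directories are hypothetical):
+
+        make_archive('backup', 'gztar', root_dir='project', base_dir='src')
+        # -> 'backup.tar.gz', with all archived paths starting at 'src'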
+ """ + save_cwd = os.getcwd() + if root_dir is not None: + if logger is not None: + logger.debug("changing into '%s'", root_dir) + base_name = os.path.abspath(base_name) + if not dry_run: + os.chdir(root_dir) + + if base_dir is None: + base_dir = os.curdir + + kwargs = {'dry_run': dry_run, 'logger': logger} + + try: + format_info = _ARCHIVE_FORMATS[format] + except KeyError: + raise ValueError("unknown archive format '%s'" % format) + + func = format_info[0] + for arg, val in format_info[1]: + kwargs[arg] = val + + if format != 'zip': + kwargs['owner'] = owner + kwargs['group'] = group + + try: + filename = func(base_name, base_dir, **kwargs) + finally: + if root_dir is not None: + if logger is not None: + logger.debug("changing back to '%s'", save_cwd) + os.chdir(save_cwd) + + return filename + + +def get_unpack_formats(): + """Returns a list of supported formats for unpacking. + + Each element of the returned sequence is a tuple + (name, extensions, description) + """ + formats = [(name, info[0], info[3]) for name, info in + _UNPACK_FORMATS.items()] + formats.sort() + return formats + +def _check_unpack_options(extensions, function, extra_args): + """Checks what gets registered as an unpacker.""" + # first make sure no other unpacker is registered for this extension + existing_extensions = {} + for name, info in _UNPACK_FORMATS.items(): + for ext in info[0]: + existing_extensions[ext] = name + + for extension in extensions: + if extension in existing_extensions: + msg = '%s is already registered for "%s"' + raise RegistryError(msg % (extension, + existing_extensions[extension])) + + if not isinstance(function, collections.Callable): + raise TypeError('The registered function must be a callable') + + +def register_unpack_format(name, extensions, function, extra_args=None, + description=''): + """Registers an unpack format. + + `name` is the name of the format. `extensions` is a list of extensions + corresponding to the format. + + `function` is the callable that will be + used to unpack archives. The callable will receive archives to unpack. + If it's unable to handle an archive, it needs to raise a ReadError + exception. + + If provided, `extra_args` is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_unpack_formats() function. + """ + if extra_args is None: + extra_args = [] + _check_unpack_options(extensions, function, extra_args) + _UNPACK_FORMATS[name] = extensions, function, extra_args, description + +def unregister_unpack_format(name): + """Removes the pack format from the registry.""" + del _UNPACK_FORMATS[name] + +def _ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def _unpack_zipfile(filename, extract_dir): + """Unpack zip `filename` to `extract_dir` + """ + try: + import zipfile + except ImportError: + raise ReadError('zlib not supported, cannot unpack this archive.') + + if not zipfile.is_zipfile(filename): + raise ReadError("%s is not a zip file" % filename) + + zip = zipfile.ZipFile(filename) + try: + for info in zip.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' 
in name: + continue + + target = os.path.join(extract_dir, *name.split('/')) + if not target: + continue + + _ensure_directory(target) + if not name.endswith('/'): + # file + data = zip.read(info.filename) + f = open(target, 'wb') + try: + f.write(data) + finally: + f.close() + del data + finally: + zip.close() + +def _unpack_tarfile(filename, extract_dir): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise ReadError( + "%s is not a compressed or uncompressed tar file" % filename) + try: + tarobj.extractall(extract_dir) + finally: + tarobj.close() + +_UNPACK_FORMATS = { + 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), + 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), + 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") + } + +if _BZ2_SUPPORTED: + _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], + "bzip2'ed tar-file") + +def _find_unpack_format(filename): + for name, info in _UNPACK_FORMATS.items(): + for extension in info[0]: + if filename.endswith(extension): + return name + return None + +def unpack_archive(filename, extract_dir=None, format=None): + """Unpack an archive. + + `filename` is the name of the archive. + + `extract_dir` is the name of the target directory, where the archive + is unpacked. If not provided, the current working directory is used. + + `format` is the archive format: one of "zip", "tar", or "gztar". Or any + other registered format. If not provided, unpack_archive will use the + filename extension and see if an unpacker was registered for that + extension. + + In case none is found, a ValueError is raised. + """ + if extract_dir is None: + extract_dir = os.getcwd() + + if format is not None: + try: + format_info = _UNPACK_FORMATS[format] + except KeyError: + raise ValueError("Unknown unpack format '{0}'".format(format)) + + func = format_info[1] + func(filename, extract_dir, **dict(format_info[2])) + else: + # we need to look at the registered unpackers supported extensions + format = _find_unpack_format(filename) + if format is None: + raise ReadError("Unknown archive format '{0}'".format(filename)) + + func = _UNPACK_FORMATS[format][1] + kwargs = dict(_UNPACK_FORMATS[format][2]) + func(filename, extract_dir, **kwargs) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/sysconfig.cfg b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/sysconfig.cfg new file mode 100644 index 0000000..1746bd0 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/sysconfig.cfg @@ -0,0 +1,84 @@ +[posix_prefix] +# Configuration directories. Some of these come straight out of the +# configure script. They are for implementing the other variables, not to +# be used directly in [resource_locations]. 
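+# Illustration only (not consumed by the code): the values below use
+# {placeholders} that are substituted from the configuration variables,
+# so with base=/usr and py_version_short=3.6 an entry such as
+# {base}/lib/python{py_version_short}/site-packages expands to
+# /usr/lib/python3.6/site-packages.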
+confdir = /etc +datadir = /usr/share +libdir = /usr/lib +statedir = /var +# User resource directory +local = ~/.local/{distribution.name} + +stdlib = {base}/lib/python{py_version_short} +platstdlib = {platbase}/lib/python{py_version_short} +purelib = {base}/lib/python{py_version_short}/site-packages +platlib = {platbase}/lib/python{py_version_short}/site-packages +include = {base}/include/python{py_version_short}{abiflags} +platinclude = {platbase}/include/python{py_version_short}{abiflags} +data = {base} + +[posix_home] +stdlib = {base}/lib/python +platstdlib = {base}/lib/python +purelib = {base}/lib/python +platlib = {base}/lib/python +include = {base}/include/python +platinclude = {base}/include/python +scripts = {base}/bin +data = {base} + +[nt] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2_home] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[nt_user] +stdlib = {userbase}/Python{py_version_nodot} +platstdlib = {userbase}/Python{py_version_nodot} +purelib = {userbase}/Python{py_version_nodot}/site-packages +platlib = {userbase}/Python{py_version_nodot}/site-packages +include = {userbase}/Python{py_version_nodot}/Include +scripts = {userbase}/Scripts +data = {userbase} + +[posix_user] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[osx_framework_user] +stdlib = {userbase}/lib/python +platstdlib = {userbase}/lib/python +purelib = {userbase}/lib/python/site-packages +platlib = {userbase}/lib/python/site-packages +include = {userbase}/include +scripts = {userbase}/bin +data = {userbase} diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/sysconfig.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/sysconfig.py new file mode 100644 index 0000000..ec28480 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/sysconfig.py @@ -0,0 +1,788 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +"""Access to Python's configuration information.""" + +import codecs +import os +import re +import sys +from os.path import pardir, realpath +try: + import configparser +except ImportError: + import ConfigParser as configparser + + +__all__ = [ + 'get_config_h_filename', + 'get_config_var', + 'get_config_vars', + 'get_makefile_filename', + 'get_path', + 'get_path_names', + 'get_paths', + 'get_platform', + 'get_python_version', + 'get_scheme_names', + 'parse_config_h', +] + + +def _safe_realpath(path): + try: + return realpath(path) + except OSError: + return path + + +if sys.executable: + _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) +else: + # sys.executable can be empty if argv[0] has been changed and Python is + # unable to retrieve the real program name + _PROJECT_BASE = _safe_realpath(os.getcwd()) + +if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) +# PC/VS7.1 +if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) +# PC/AMD64 +if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) + + +def is_python_build(): + for fn in ("Setup.dist", "Setup.local"): + if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): + return True + return False + +_PYTHON_BUILD = is_python_build() + +_cfg_read = False + +def _ensure_cfg_read(): + global _cfg_read + if not _cfg_read: + from ..resources import finder + backport_package = __name__.rsplit('.', 1)[0] + _finder = finder(backport_package) + _cfgfile = _finder.find('sysconfig.cfg') + assert _cfgfile, 'sysconfig.cfg exists' + with _cfgfile.as_stream() as s: + _SCHEMES.readfp(s) + if _PYTHON_BUILD: + for scheme in ('posix_prefix', 'posix_home'): + _SCHEMES.set(scheme, 'include', '{srcdir}/Include') + _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') + + _cfg_read = True + + +_SCHEMES = configparser.RawConfigParser() +_VAR_REPL = re.compile(r'\{([^{]*?)\}') + +def _expand_globals(config): + _ensure_cfg_read() + if config.has_section('globals'): + globals = config.items('globals') + else: + globals = tuple() + + sections = config.sections() + for section in sections: + if section == 'globals': + continue + for option, value in globals: + if config.has_option(section, option): + continue + config.set(section, option, value) + config.remove_section('globals') + + # now expanding local variables defined in the cfg file + # + for section in config.sections(): + variables = dict(config.items(section)) + + def _replacer(matchobj): + name = matchobj.group(1) + if name in variables: + return variables[name] + return matchobj.group(0) + + for option, value in config.items(section): + config.set(section, option, _VAR_REPL.sub(_replacer, value)) + +#_expand_globals(_SCHEMES) + + # FIXME don't rely on sys.version here, its format is an implementation detail + # of CPython, use sys.version_info or sys.hexversion +_PY_VERSION = sys.version.split()[0] +_PY_VERSION_SHORT = sys.version[:3] +_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2] +_PREFIX = os.path.normpath(sys.prefix) +_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +_CONFIG_VARS = None +_USER_BASE = None + + +def _subst_vars(path, local_vars): + """In the string `path`, replace tokens like {some.thing} with the + corresponding value from the map `local_vars`. 
+ + If there is no corresponding value, leave the token unchanged. + """ + def _replacer(matchobj): + name = matchobj.group(1) + if name in local_vars: + return local_vars[name] + elif name in os.environ: + return os.environ[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, path) + + +def _extend_dict(target_dict, other_dict): + target_keys = target_dict.keys() + for key, value in other_dict.items(): + if key in target_keys: + continue + target_dict[key] = value + + +def _expand_vars(scheme, vars): + res = {} + if vars is None: + vars = {} + _extend_dict(vars, get_config_vars()) + + for key, value in _SCHEMES.items(scheme): + if os.name in ('posix', 'nt'): + value = os.path.expanduser(value) + res[key] = os.path.normpath(_subst_vars(value, vars)) + return res + + +def format_value(value, vars): + def _replacer(matchobj): + name = matchobj.group(1) + if name in vars: + return vars[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, value) + + +def _get_default_scheme(): + if os.name == 'posix': + # the default scheme for posix is posix_prefix + return 'posix_prefix' + return os.name + + +def _getuserbase(): + env_base = os.environ.get("PYTHONUSERBASE", None) + + def joinuser(*args): + return os.path.expanduser(os.path.join(*args)) + + # what about 'os2emx', 'riscos' ? + if os.name == "nt": + base = os.environ.get("APPDATA") or "~" + if env_base: + return env_base + else: + return joinuser(base, "Python") + + if sys.platform == "darwin": + framework = get_config_var("PYTHONFRAMEWORK") + if framework: + if env_base: + return env_base + else: + return joinuser("~", "Library", framework, "%d.%d" % + sys.version_info[:2]) + + if env_base: + return env_base + else: + return joinuser("~", ".local") + + +def _parse_makefile(filename, vars=None): + """Parse a Makefile-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + # Regexes needed for parsing Makefile (and similar syntaxes, + # like old-style Setup files). + _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") + _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") + _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + + if vars is None: + vars = {} + done = {} + notdone = {} + + with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: + lines = f.readlines() + + for line in lines: + if line.startswith('#') or line.strip() == '': + continue + m = _variable_rx.match(line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # do variable interpolation here + variables = list(notdone.keys()) + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. 
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + while len(variables) > 0: + for name in tuple(variables): + value = notdone[name] + m = _findvar1_rx.search(value) or _findvar2_rx.search(value) + if m is not None: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if (name.startswith('PY_') and + name[3:] in renamed_variables): + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + + else: + done[n] = item = "" + + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: + value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + variables.remove(name) + + if (name.startswith('PY_') and + name[3:] in renamed_variables): + + name = name[3:] + if name not in done: + done[name] = value + + else: + # bogus variable reference (e.g. "prefix=$/opt/python"); + # just drop it since we can't deal + done[name] = value + variables.remove(name) + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + vars.update(done) + return vars + + +def get_makefile_filename(): + """Return the path of the Makefile.""" + if _PYTHON_BUILD: + return os.path.join(_PROJECT_BASE, "Makefile") + if hasattr(sys, 'abiflags'): + config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) + else: + config_dir_name = 'config' + return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" + # load the installed Makefile: + makefile = get_makefile_filename() + try: + _parse_makefile(makefile, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % makefile + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # load the installed pyconfig.h: + config_h = get_config_h_filename() + try: + with open(config_h) as f: + parse_config_h(f, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % config_h + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # On AIX, there are wrong paths to the linker scripts in the Makefile + # -- these paths are relative to the Python source, but when installed + # the scripts are in another directory. + if _PYTHON_BUILD: + vars['LDSHARED'] = vars['BLDSHARED'] + + +def _init_non_posix(vars): + """Initialize the module as appropriate for NT""" + # set basic install directories + vars['LIBDEST'] = get_path('stdlib') + vars['BINLIBDEST'] = get_path('platstdlib') + vars['INCLUDEPY'] = get_path('include') + vars['SO'] = '.pyd' + vars['EXE'] = '.exe' + vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT + vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) + +# +# public APIs +# + + +def parse_config_h(fp, vars=None): + """Parse a config.h-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. 
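+
+    A minimal usage sketch (illustrative), pairing it with
+    get_config_h_filename() from this module:
+
+        with open(get_config_h_filename()) as f:
+            config_vars = parse_config_h(f)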
+ """ + if vars is None: + vars = {} + define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") + undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") + + while True: + line = fp.readline() + if not line: + break + m = define_rx.match(line) + if m: + n, v = m.group(1, 2) + try: + v = int(v) + except ValueError: + pass + vars[n] = v + else: + m = undef_rx.match(line) + if m: + vars[m.group(1)] = 0 + return vars + + +def get_config_h_filename(): + """Return the path of pyconfig.h.""" + if _PYTHON_BUILD: + if os.name == "nt": + inc_dir = os.path.join(_PROJECT_BASE, "PC") + else: + inc_dir = _PROJECT_BASE + else: + inc_dir = get_path('platinclude') + return os.path.join(inc_dir, 'pyconfig.h') + + +def get_scheme_names(): + """Return a tuple containing the schemes names.""" + return tuple(sorted(_SCHEMES.sections())) + + +def get_path_names(): + """Return a tuple containing the paths names.""" + # xxx see if we want a static list + return _SCHEMES.options('posix_prefix') + + +def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): + """Return a mapping containing an install scheme. + + ``scheme`` is the install scheme name. If not provided, it will + return the default scheme for the current platform. + """ + _ensure_cfg_read() + if expand: + return _expand_vars(scheme, vars) + else: + return dict(_SCHEMES.items(scheme)) + + +def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): + """Return a path corresponding to the scheme. + + ``scheme`` is the install scheme name. + """ + return get_paths(scheme, vars, expand)[name] + + +def get_config_vars(*args): + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. + + On Unix, this means every variable defined in Python's installed Makefile; + On Windows and Mac OS it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ + global _CONFIG_VARS + if _CONFIG_VARS is None: + _CONFIG_VARS = {} + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # distutils2 module. + _CONFIG_VARS['prefix'] = _PREFIX + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _CONFIG_VARS['py_version'] = _PY_VERSION + _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT + _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] + _CONFIG_VARS['base'] = _PREFIX + _CONFIG_VARS['platbase'] = _EXEC_PREFIX + _CONFIG_VARS['projectbase'] = _PROJECT_BASE + try: + _CONFIG_VARS['abiflags'] = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + _CONFIG_VARS['abiflags'] = '' + + if os.name in ('nt', 'os2'): + _init_non_posix(_CONFIG_VARS) + if os.name == 'posix': + _init_posix(_CONFIG_VARS) + # Setting 'userbase' is done below the call to the + # init function to enable using 'get_config_var' in + # the init-function. + if sys.version >= '2.6': + _CONFIG_VARS['userbase'] = _getuserbase() + + if 'srcdir' not in _CONFIG_VARS: + _CONFIG_VARS['srcdir'] = _PROJECT_BASE + else: + _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) + + # Convert srcdir into an absolute path if it appears necessary. + # Normally it is relative to the build directory. However, during + # testing, for example, we might be running a non-installed python + # from a different directory. 
+ if _PYTHON_BUILD and os.name == "posix": + base = _PROJECT_BASE + try: + cwd = os.getcwd() + except OSError: + cwd = None + if (not os.path.isabs(_CONFIG_VARS['srcdir']) and + base != cwd): + # srcdir is relative and we are not in the same directory + # as the executable. Assume executable is in the build + # directory and make srcdir absolute. + srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) + _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) + + if sys.platform == 'darwin': + kernel_version = os.uname()[2] # Kernel version (8.4.3) + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # On macOS before 10.4, check if -arch and -isysroot + # are in CFLAGS or LDFLAGS and remove them if they are. + # This is needed when building extensions on a 10.3 system + # using a universal build of python. + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + flags = _CONFIG_VARS[key] + flags = re.sub('-arch\s+\w+\s', ' ', flags) + flags = re.sub('-isysroot [^ \t]*', ' ', flags) + _CONFIG_VARS[key] = flags + else: + # Allow the user to override the architecture flags using + # an environment variable. + # NOTE: This name was introduced by Apple in OSX 10.5 and + # is used by several scripting languages distributed with + # that OS release. + if 'ARCHFLAGS' in os.environ: + arch = os.environ['ARCHFLAGS'] + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub('-arch\s+\w+\s', ' ', flags) + flags = flags + ' ' + arch + _CONFIG_VARS[key] = flags + + # If we're on OSX 10.5 or later and the user tries to + # compiles an extension using an SDK that is not present + # on the current machine it is better to not use an SDK + # than to fail. + # + # The major usecase for this is users using a Python.org + # binary installer on OSX 10.6: that installer uses + # the 10.4u SDK, but that SDK is not installed by default + # when you install Xcode. + # + CFLAGS = _CONFIG_VARS.get('CFLAGS', '') + m = re.search('-isysroot\s+(\S+)', CFLAGS) + if m is not None: + sdk = m.group(1) + if not os.path.exists(sdk): + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags) + _CONFIG_VARS[key] = flags + + if args: + vals = [] + for name in args: + vals.append(_CONFIG_VARS.get(name)) + return vals + else: + return _CONFIG_VARS + + +def get_config_var(name): + """Return the value of a single variable using the dictionary returned by + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ + return get_config_vars().get(name) + + +def get_platform(): + """Return a string that identifies the current platform. + + This is used mainly to distinguish platform-specific build directories and + platform-specific built distributions. Typically includes the OS name + and version and the architecture (as supplied by 'os.uname()'), + although the exact information included depends on the OS; eg. for IRIX + the architecture isn't particularly important (IRIX only runs on SGI + hardware), but for Linux the kernel version isn't particularly + important. + + Examples of returned values: + linux-i586 + linux-alpha (?) 
+       solaris-2.6-sun4u
+       irix-5.3
+       irix64-6.2
+
+    Windows will return one of:
+       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+       win-ia64 (64bit Windows on Itanium)
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+    """
+    if os.name == 'nt':
+        # sniff sys.version for architecture.
+        prefix = " bit ("
+        i = sys.version.find(prefix)
+        if i == -1:
+            return sys.platform
+        j = sys.version.find(")", i)
+        look = sys.version[i+len(prefix):j].lower()
+        if look == 'amd64':
+            return 'win-amd64'
+        if look == 'itanium':
+            return 'win-ia64'
+        return sys.platform
+
+    if os.name != "posix" or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha,
+        # Mac OS is M68k or PPC, etc.
+        return sys.platform
+
+    # Try to distinguish various flavours of Unix
+    osname, host, release, version, machine = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters
+    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
+    osname = osname.lower().replace('/', '')
+    machine = machine.replace(' ', '_')
+    machine = machine.replace('/', '-')
+
+    if osname[:5] == "linux":
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return "%s-%s" % (osname, machine)
+    elif osname[:5] == "sunos":
+        if release[0] >= "5":  # SunOS 5 == Solaris 2
+            osname = "solaris"
+            release = "%d.%s" % (int(release[0]) - 3, release[2:])
+        # fall through to standard osname-release-machine representation
+    elif osname[:4] == "irix":  # could be "irix64"!
+        return "%s-%s" % (osname, release)
+    elif osname[:3] == "aix":
+        return "%s-%s.%s" % (osname, version, release)
+    elif osname[:6] == "cygwin":
+        osname = "cygwin"
+        rel_re = re.compile(r'[\d.]+')
+        m = rel_re.match(release)
+        if m:
+            release = m.group()
+    elif osname[:6] == "darwin":
+        #
+        # For our purposes, we'll assume that the system version from
+        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
+        # to. This makes the compatibility story a bit more sane because the
+        # machine is going to compile and link as if it were
+        # MACOSX_DEPLOYMENT_TARGET.
+        cfgvars = get_config_vars()
+        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
+
+        if True:
+            # Always calculate the release of the running machine,
+            # needed to determine if we can build fat binaries or not.
+
+            macrelease = macver
+            # Get the system version. Reading this plist is a documented
+            # way to get the system version (see the documentation for
+            # the Gestalt Manager)
+            try:
+                f = open('/System/Library/CoreServices/SystemVersion.plist')
+            except IOError:
+                # We're on a plain darwin box, fall back to the default
+                # behaviour.
+                pass
+            else:
+                try:
+                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
+                                  r'<string>(.*?)</string>', f.read())
+                finally:
+                    f.close()
+                if m is not None:
+                    macrelease = '.'.join(m.group(1).split('.')[:2])
+                # else: fall back to the default behaviour
+
+        if not macver:
+            macver = macrelease
+
+        if macver:
+            release = macver
+            osname = "macosx"
+
+            if ((macrelease + '.') >= '10.4.' and
+                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
+                # The universal build will build fat binaries, but not on
+                # systems before 10.4
+                #
+                # Try to detect 4-way universal builds, those have machine-type
+                # 'universal' instead of 'fat'.
+ + machine = 'fat' + cflags = get_config_vars().get('CFLAGS') + + archs = re.findall('-arch\s+(\S+)', cflags) + archs = tuple(sorted(set(archs))) + + if len(archs) == 1: + machine = archs[0] + elif archs == ('i386', 'ppc'): + machine = 'fat' + elif archs == ('i386', 'x86_64'): + machine = 'intel' + elif archs == ('i386', 'ppc', 'x86_64'): + machine = 'fat3' + elif archs == ('ppc64', 'x86_64'): + machine = 'fat64' + elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): + machine = 'universal' + else: + raise ValueError( + "Don't know machine value for archs=%r" % (archs,)) + + elif machine == 'i386': + # On OSX the machine type returned by uname is always the + # 32-bit variant, even if the executable architecture is + # the 64-bit variant + if sys.maxsize >= 2**32: + machine = 'x86_64' + + elif machine in ('PowerPC', 'Power_Macintosh'): + # Pick a sane name for the PPC architecture. + # See 'i386' case + if sys.maxsize >= 2**32: + machine = 'ppc64' + else: + machine = 'ppc' + + return "%s-%s-%s" % (osname, release, machine) + + +def get_python_version(): + return _PY_VERSION_SHORT + + +def _print_dict(title, data): + for index, (key, value) in enumerate(sorted(data.items())): + if index == 0: + print('%s: ' % (title)) + print('\t%s = "%s"' % (key, value)) + + +def _main(): + """Display all information sysconfig detains.""" + print('Platform: "%s"' % get_platform()) + print('Python version: "%s"' % get_python_version()) + print('Current installation scheme: "%s"' % _get_default_scheme()) + print() + _print_dict('Paths', get_paths()) + print() + _print_dict('Variables', get_config_vars()) + + +if __name__ == '__main__': + _main() diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/tarfile.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/tarfile.py new file mode 100644 index 0000000..d66d856 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/_backport/tarfile.py @@ -0,0 +1,2607 @@ +#------------------------------------------------------------------- +# tarfile.py +#------------------------------------------------------------------- +# Copyright (C) 2002 Lars Gustaebel +# All rights reserved. +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +from __future__ import print_function + +"""Read from and write to tar format archives. 
+""" + +__version__ = "$Revision$" + +version = "0.9.0" +__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" +__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" +__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" +__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." + +#--------- +# Imports +#--------- +import sys +import os +import stat +import errno +import time +import struct +import copy +import re + +try: + import grp, pwd +except ImportError: + grp = pwd = None + +# os.symlink on Windows prior to 6.0 raises NotImplementedError +symlink_exception = (AttributeError, NotImplementedError) +try: + # WindowsError (1314) will be raised if the caller does not hold the + # SeCreateSymbolicLinkPrivilege privilege + symlink_exception += (WindowsError,) +except NameError: + pass + +# from tarfile import * +__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] + +if sys.version_info[0] < 3: + import __builtin__ as builtins +else: + import builtins + +_open = builtins.open # Since 'open' is TarFile.open + +#--------------------------------------------------------- +# tar constants +#--------------------------------------------------------- +NUL = b"\0" # the null character +BLOCKSIZE = 512 # length of processing blocks +RECORDSIZE = BLOCKSIZE * 20 # length of records +GNU_MAGIC = b"ustar \0" # magic gnu tar string +POSIX_MAGIC = b"ustar\x0000" # magic posix tar string + +LENGTH_NAME = 100 # maximum length of a filename +LENGTH_LINK = 100 # maximum length of a linkname +LENGTH_PREFIX = 155 # maximum length of the prefix field + +REGTYPE = b"0" # regular file +AREGTYPE = b"\0" # regular file +LNKTYPE = b"1" # link (inside tarfile) +SYMTYPE = b"2" # symbolic link +CHRTYPE = b"3" # character special device +BLKTYPE = b"4" # block special device +DIRTYPE = b"5" # directory +FIFOTYPE = b"6" # fifo special device +CONTTYPE = b"7" # contiguous file + +GNUTYPE_LONGNAME = b"L" # GNU tar longname +GNUTYPE_LONGLINK = b"K" # GNU tar longlink +GNUTYPE_SPARSE = b"S" # GNU tar sparse file + +XHDTYPE = b"x" # POSIX.1-2001 extended header +XGLTYPE = b"g" # POSIX.1-2001 global header +SOLARIS_XHDTYPE = b"X" # Solaris extended header + +USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format +GNU_FORMAT = 1 # GNU tar format +PAX_FORMAT = 2 # POSIX.1-2001 (pax) format +DEFAULT_FORMAT = GNU_FORMAT + +#--------------------------------------------------------- +# tarfile constants +#--------------------------------------------------------- +# File types that tarfile supports: +SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, + SYMTYPE, DIRTYPE, FIFOTYPE, + CONTTYPE, CHRTYPE, BLKTYPE, + GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# File types that will be treated as a regular file. +REGULAR_TYPES = (REGTYPE, AREGTYPE, + CONTTYPE, GNUTYPE_SPARSE) + +# File types that are part of the GNU tar format. +GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# Fields from a pax header that override a TarInfo attribute. +PAX_FIELDS = ("path", "linkpath", "size", "mtime", + "uid", "gid", "uname", "gname") + +# Fields from a pax header that are affected by hdrcharset. +PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) + +# Fields in a pax header that are numbers, all other fields +# are treated as strings. 
+PAX_NUMBER_FIELDS = { + "atime": float, + "ctime": float, + "mtime": float, + "uid": int, + "gid": int, + "size": int +} + +#--------------------------------------------------------- +# Bits used in the mode field, values in octal. +#--------------------------------------------------------- +S_IFLNK = 0o120000 # symbolic link +S_IFREG = 0o100000 # regular file +S_IFBLK = 0o060000 # block device +S_IFDIR = 0o040000 # directory +S_IFCHR = 0o020000 # character device +S_IFIFO = 0o010000 # fifo + +TSUID = 0o4000 # set UID on execution +TSGID = 0o2000 # set GID on execution +TSVTX = 0o1000 # reserved + +TUREAD = 0o400 # read by owner +TUWRITE = 0o200 # write by owner +TUEXEC = 0o100 # execute/search by owner +TGREAD = 0o040 # read by group +TGWRITE = 0o020 # write by group +TGEXEC = 0o010 # execute/search by group +TOREAD = 0o004 # read by other +TOWRITE = 0o002 # write by other +TOEXEC = 0o001 # execute/search by other + +#--------------------------------------------------------- +# initialization +#--------------------------------------------------------- +if os.name in ("nt", "ce"): + ENCODING = "utf-8" +else: + ENCODING = sys.getfilesystemencoding() + +#--------------------------------------------------------- +# Some useful functions +#--------------------------------------------------------- + +def stn(s, length, encoding, errors): + """Convert a string to a null-terminated bytes object. + """ + s = s.encode(encoding, errors) + return s[:length] + (length - len(s)) * NUL + +def nts(s, encoding, errors): + """Convert a null-terminated bytes object to a string. + """ + p = s.find(b"\0") + if p != -1: + s = s[:p] + return s.decode(encoding, errors) + +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] != chr(0o200): + try: + n = int(nts(s, "ascii", "strict") or "0", 8) + except ValueError: + raise InvalidHeaderError("invalid header") + else: + n = 0 + for i in range(len(s) - 1): + n <<= 8 + n += ord(s[i + 1]) + return n + +def itn(n, digits=8, format=DEFAULT_FORMAT): + """Convert a python number to a number field. + """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0o200 byte indicates this particular + # encoding, the following digits-1 bytes are a big-endian + # representation. This allows values up to (256**(digits-1))-1. + if 0 <= n < 8 ** (digits - 1): + s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL + else: + if format != GNU_FORMAT or n >= 256 ** (digits - 1): + raise ValueError("overflow in number field") + + if n < 0: + # XXX We mimic GNU tar's behaviour with negative numbers, + # this could raise OverflowError. + n = struct.unpack("L", struct.pack("l", n))[0] + + s = bytearray() + for i in range(digits - 1): + s.insert(0, n & 0o377) + n >>= 8 + s.insert(0, 0o200) + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. 
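+
+       A minimal sketch of the intended call (illustrative; buf is assumed
+       to hold one 512-byte header block):
+
+           unsigned_chksum, signed_chksum = calc_chksums(buf)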
+ """ + unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) + signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) + return unsigned_chksum, signed_chksum + +def copyfileobj(src, dst, length=None): + """Copy length bytes from fileobj src to fileobj dst. + If length is None, copy the entire content. + """ + if length == 0: + return + if length is None: + while True: + buf = src.read(16*1024) + if not buf: + break + dst.write(buf) + return + + BUFSIZE = 16 * 1024 + blocks, remainder = divmod(length, BUFSIZE) + for b in range(blocks): + buf = src.read(BUFSIZE) + if len(buf) < BUFSIZE: + raise IOError("end of file reached") + dst.write(buf) + + if remainder != 0: + buf = src.read(remainder) + if len(buf) < remainder: + raise IOError("end of file reached") + dst.write(buf) + return + +filemode_table = ( + ((S_IFLNK, "l"), + (S_IFREG, "-"), + (S_IFBLK, "b"), + (S_IFDIR, "d"), + (S_IFCHR, "c"), + (S_IFIFO, "p")), + + ((TUREAD, "r"),), + ((TUWRITE, "w"),), + ((TUEXEC|TSUID, "s"), + (TSUID, "S"), + (TUEXEC, "x")), + + ((TGREAD, "r"),), + ((TGWRITE, "w"),), + ((TGEXEC|TSGID, "s"), + (TSGID, "S"), + (TGEXEC, "x")), + + ((TOREAD, "r"),), + ((TOWRITE, "w"),), + ((TOEXEC|TSVTX, "t"), + (TSVTX, "T"), + (TOEXEC, "x")) +) + +def filemode(mode): + """Convert a file's mode to a string of the form + -rwxrwxrwx. + Used by TarFile.list() + """ + perm = [] + for table in filemode_table: + for bit, char in table: + if mode & bit == bit: + perm.append(char) + break + else: + perm.append("-") + return "".join(perm) + +class TarError(Exception): + """Base exception.""" + pass +class ExtractError(TarError): + """General exception for extract errors.""" + pass +class ReadError(TarError): + """Exception for unreadable tar archives.""" + pass +class CompressionError(TarError): + """Exception for unavailable compression methods.""" + pass +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles.""" + pass +class HeaderError(TarError): + """Base exception for header errors.""" + pass +class EmptyHeaderError(HeaderError): + """Exception for empty headers.""" + pass +class TruncatedHeaderError(HeaderError): + """Exception for truncated headers.""" + pass +class EOFHeaderError(HeaderError): + """Exception for end of file headers.""" + pass +class InvalidHeaderError(HeaderError): + """Exception for invalid headers.""" + pass +class SubsequentHeaderError(HeaderError): + """Exception for missing and invalid extended headers.""" + pass + +#--------------------------- +# internal stream interface +#--------------------------- +class _LowLevelFile(object): + """Low-level file object. Supports reading and writing. + It is used instead of a regular file object for streaming + access. + """ + + def __init__(self, name, mode): + mode = { + "r": os.O_RDONLY, + "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + }[mode] + if hasattr(os, "O_BINARY"): + mode |= os.O_BINARY + self.fd = os.open(name, mode, 0o666) + + def close(self): + os.close(self.fd) + + def read(self, size): + return os.read(self.fd, size) + + def write(self, s): + os.write(self.fd, s) + +class _Stream(object): + """Class that serves as an adapter between TarFile and + a stream-like object. The stream-like object only + needs to have a read() or write() method and is accessed + blockwise. Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin, + sys.stdout, a socket, a tape device etc. 
+
+       _Stream is intended to be used only internally.
+    """
+
+    def __init__(self, name, mode, comptype, fileobj, bufsize):
+        """Construct a _Stream object.
+        """
+        self._extfileobj = True
+        if fileobj is None:
+            fileobj = _LowLevelFile(name, mode)
+            self._extfileobj = False
+
+        if comptype == '*':
+            # Enable transparent compression detection for the
+            # stream interface
+            fileobj = _StreamProxy(fileobj)
+            comptype = fileobj.getcomptype()
+
+        self.name = name or ""
+        self.mode = mode
+        self.comptype = comptype
+        self.fileobj = fileobj
+        self.bufsize = bufsize
+        self.buf = b""
+        self.pos = 0
+        self.closed = False
+
+        try:
+            if comptype == "gz":
+                try:
+                    import zlib
+                except ImportError:
+                    raise CompressionError("zlib module is not available")
+                self.zlib = zlib
+                self.crc = zlib.crc32(b"")
+                if mode == "r":
+                    self._init_read_gz()
+                else:
+                    self._init_write_gz()
+
+            if comptype == "bz2":
+                try:
+                    import bz2
+                except ImportError:
+                    raise CompressionError("bz2 module is not available")
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = bz2.BZ2Decompressor()
+                else:
+                    self.cmp = bz2.BZ2Compressor()
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    def __del__(self):
+        if hasattr(self, "closed") and not self.closed:
+            self.close()
+
+    def _init_write_gz(self):
+        """Initialize for writing with gzip compression.
+        """
+        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
+                                         -self.zlib.MAX_WBITS,
+                                         self.zlib.DEF_MEM_LEVEL,
+                                         0)
+        timestamp = struct.pack("<L", int(time.time()))
+        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+        if self.name.endswith(".gz"):
+            self.name = self.name[:-3]
+        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+    def write(self, s):
+        """Write string s to the stream.
+        """
+        if self.comptype == "gz":
+            self.crc = self.zlib.crc32(s, self.crc)
+        self.pos += len(s)
+        if self.comptype != "tar":
+            s = self.cmp.compress(s)
+        self.__write(s)
+
+    def __write(self, s):
+        """Write string s to the stream if a whole new block
+           is ready to be written.
+        """
+        self.buf += s
+        while len(self.buf) > self.bufsize:
+            self.fileobj.write(self.buf[:self.bufsize])
+            self.buf = self.buf[self.bufsize:]
+
+    def close(self):
+        """Close the _Stream object. No operation should be
+           done on it afterwards.
+        """
+        if self.closed:
+            return
+
+        if self.mode == "w" and self.comptype != "tar":
+            self.buf += self.cmp.flush()
+
+        if self.mode == "w" and self.buf:
+            self.fileobj.write(self.buf)
+            self.buf = b""
+            if self.comptype == "gz":
+                # The native zlib crc is an unsigned 32-bit integer, but
+                # the Python wrapper implicitly casts that to a signed C
+                # long. So, on a 32-bit box self.crc may "look negative",
+                # while the same crc on a 64-bit box may "look positive".
+                # To avoid irksome warnings from the `struct` module, force
+                # it to look positive on all boxes.
+                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
+                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+
+        if not self._extfileobj:
+            self.fileobj.close()
+
+        self.closed = True
+
+    def _init_read_gz(self):
+        """Initialize for reading a gzip compressed fileobj.
+        """
+        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+        self.dbuf = b""
+
+        # taken from gzip.GzipFile with some alterations
+        if self.__read(2) != b"\037\213":
+            raise ReadError("not a gzip file")
+        if self.__read(1) != b"\010":
+            raise CompressionError("unsupported compression method")
+
+        flag = ord(self.__read(1))
+        self.__read(6)
+
+        if flag & 4:
+            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+            self.read(xlen)
+        if flag & 8:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 16:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 2:
+            self.__read(2)
+
+    def tell(self):
+        """Return the stream's file pointer position.
+        """
+        return self.pos
+
+    def seek(self, pos=0):
+        """Set the stream's file pointer to pos. Negative seeking
+           is forbidden.
+        """
+        if pos - self.pos >= 0:
+            blocks, remainder = divmod(pos - self.pos, self.bufsize)
+            for i in range(blocks):
+                self.read(self.bufsize)
+            self.read(remainder)
+        else:
+            raise StreamError("seeking backwards is not allowed")
+        return self.pos
+
+    def read(self, size=None):
+        """Return the next size number of bytes from the stream.
+        If size is not defined, return all bytes of the stream
+        up to EOF.
+        """
+        if size is None:
+            t = []
+            while True:
+                buf = self._read(self.bufsize)
+                if not buf:
+                    break
+                t.append(buf)
+            buf = "".join(t)
+        else:
+            buf = self._read(size)
+        self.pos += len(buf)
+        return buf
+
+    def _read(self, size):
+        """Return size bytes from the stream.
+        """
+        if self.comptype == "tar":
+            return self.__read(size)
+
+        c = len(self.dbuf)
+        while c < size:
+            buf = self.__read(self.bufsize)
+            if not buf:
+                break
+            try:
+                buf = self.cmp.decompress(buf)
+            except IOError:
+                raise ReadError("invalid compressed data")
+            self.dbuf += buf
+            c += len(buf)
+        buf = self.dbuf[:size]
+        self.dbuf = self.dbuf[size:]
+        return buf
+
+    def __read(self, size):
+        """Return size bytes from stream. If internal buffer is empty,
+           read another block from the stream.
+ """ + c = len(self.buf) + while c < size: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + self.buf += buf + c += len(buf) + buf = self.buf[:size] + self.buf = self.buf[size:] + return buf +# class _Stream + +class _StreamProxy(object): + """Small proxy class that enables transparent compression + detection for the Stream interface (mode 'r|*'). + """ + + def __init__(self, fileobj): + self.fileobj = fileobj + self.buf = self.fileobj.read(BLOCKSIZE) + + def read(self, size): + self.read = self.fileobj.read + return self.buf + + def getcomptype(self): + if self.buf.startswith(b"\037\213\010"): + return "gz" + if self.buf.startswith(b"BZh91"): + return "bz2" + return "tar" + + def close(self): + self.fileobj.close() +# class StreamProxy + +class _BZ2Proxy(object): + """Small proxy class that enables external file object + support for "r:bz2" and "w:bz2" modes. This is actually + a workaround for a limitation in bz2 module's BZ2File + class which (unlike gzip.GzipFile) has no support for + a file object argument. + """ + + blocksize = 16 * 1024 + + def __init__(self, fileobj, mode): + self.fileobj = fileobj + self.mode = mode + self.name = getattr(self.fileobj, "name", None) + self.init() + + def init(self): + import bz2 + self.pos = 0 + if self.mode == "r": + self.bz2obj = bz2.BZ2Decompressor() + self.fileobj.seek(0) + self.buf = b"" + else: + self.bz2obj = bz2.BZ2Compressor() + + def read(self, size): + x = len(self.buf) + while x < size: + raw = self.fileobj.read(self.blocksize) + if not raw: + break + data = self.bz2obj.decompress(raw) + self.buf += data + x += len(data) + + buf = self.buf[:size] + self.buf = self.buf[size:] + self.pos += len(buf) + return buf + + def seek(self, pos): + if pos < self.pos: + self.init() + self.read(pos - self.pos) + + def tell(self): + return self.pos + + def write(self, data): + self.pos += len(data) + raw = self.bz2obj.compress(data) + self.fileobj.write(raw) + + def close(self): + if self.mode == "w": + raw = self.bz2obj.flush() + self.fileobj.write(raw) +# class _BZ2Proxy + +#------------------------ +# Extraction file object +#------------------------ +class _FileInFile(object): + """A thin wrapper around an existing file object that + provides a part of its data as an individual file + object. + """ + + def __init__(self, fileobj, offset, size, blockinfo=None): + self.fileobj = fileobj + self.offset = offset + self.size = size + self.position = 0 + + if blockinfo is None: + blockinfo = [(0, size)] + + # Construct a map with data and zero blocks. + self.map_index = 0 + self.map = [] + lastpos = 0 + realpos = self.offset + for offset, size in blockinfo: + if offset > lastpos: + self.map.append((False, lastpos, offset, None)) + self.map.append((True, offset, offset + size, realpos)) + realpos += size + lastpos = offset + size + if lastpos < self.size: + self.map.append((False, lastpos, self.size, None)) + + def seekable(self): + if not hasattr(self.fileobj, "seekable"): + # XXX gzip.GzipFile and bz2.BZ2File + return True + return self.fileobj.seekable() + + def tell(self): + """Return the current file position. + """ + return self.position + + def seek(self, position): + """Seek to a position in the file. + """ + self.position = position + + def read(self, size=None): + """Read data from the file. 
+ """ + if size is None: + size = self.size - self.position + else: + size = min(size, self.size - self.position) + + buf = b"" + while size > 0: + while True: + data, start, stop, offset = self.map[self.map_index] + if start <= self.position < stop: + break + else: + self.map_index += 1 + if self.map_index == len(self.map): + self.map_index = 0 + length = min(size, stop - self.position) + if data: + self.fileobj.seek(offset + (self.position - start)) + buf += self.fileobj.read(length) + else: + buf += NUL * length + size -= length + self.position += length + return buf +#class _FileInFile + + +class ExFileObject(object): + """File-like object for reading an archive member. + Is returned by TarFile.extractfile(). + """ + blocksize = 1024 + + def __init__(self, tarfile, tarinfo): + self.fileobj = _FileInFile(tarfile.fileobj, + tarinfo.offset_data, + tarinfo.size, + tarinfo.sparse) + self.name = tarinfo.name + self.mode = "r" + self.closed = False + self.size = tarinfo.size + + self.position = 0 + self.buffer = b"" + + def readable(self): + return True + + def writable(self): + return False + + def seekable(self): + return self.fileobj.seekable() + + def read(self, size=None): + """Read at most size bytes from the file. If size is not + present or None, read all data until EOF is reached. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + buf = b"" + if self.buffer: + if size is None: + buf = self.buffer + self.buffer = b"" + else: + buf = self.buffer[:size] + self.buffer = self.buffer[size:] + + if size is None: + buf += self.fileobj.read() + else: + buf += self.fileobj.read(size - len(buf)) + + self.position += len(buf) + return buf + + # XXX TextIOWrapper uses the read1() method. + read1 = read + + def readline(self, size=-1): + """Read one entire line from the file. If size is present + and non-negative, return a string with at most that + size, which may be an incomplete line. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + while True: + buf = self.fileobj.read(self.blocksize) + self.buffer += buf + if not buf or b"\n" in buf: + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + pos = len(self.buffer) + break + + if size != -1: + pos = min(size, pos) + + buf = self.buffer[:pos] + self.buffer = self.buffer[pos:] + self.position += len(buf) + return buf + + def readlines(self): + """Return a list with all remaining lines. + """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def tell(self): + """Return the current file position. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + return self.position + + def seek(self, pos, whence=os.SEEK_SET): + """Seek to a position in the file. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + if whence == os.SEEK_SET: + self.position = min(max(pos, 0), self.size) + elif whence == os.SEEK_CUR: + if pos < 0: + self.position = max(self.position + pos, 0) + else: + self.position = min(self.position + pos, self.size) + elif whence == os.SEEK_END: + self.position = max(min(self.size + pos, self.size), 0) + else: + raise ValueError("Invalid argument") + + self.buffer = b"" + self.fileobj.seek(self.position) + + def close(self): + """Close the file object. + """ + self.closed = True + + def __iter__(self): + """Get an iterator over the file's lines. 
+ """ + while True: + line = self.readline() + if not line: + break + yield line +#class ExFileObject + +#------------------ +# Exported Classes +#------------------ +class TarInfo(object): + """Informational class which holds the details about an + archive member given by a tar header block. + TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + + __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", + "chksum", "type", "linkname", "uname", "gname", + "devmajor", "devminor", + "offset", "offset_data", "pax_headers", "sparse", + "tarfile", "_sparse_structs", "_link_target") + + def __init__(self, name=""): + """Construct a TarInfo object. name is the optional name + of the member. + """ + self.name = name # member name + self.mode = 0o644 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "" # user name + self.gname = "" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here + + self.sparse = None # sparse member information + self.pax_headers = {} # pax header information + + # In pax headers the "name" and "linkname" field are called + # "path" and "linkpath". + def _getpath(self): + return self.name + def _setpath(self, name): + self.name = name + path = property(_getpath, _setpath) + + def _getlinkpath(self): + return self.linkname + def _setlinkpath(self, linkname): + self.linkname = linkname + linkpath = property(_getlinkpath, _setlinkpath) + + def __repr__(self): + return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + + def get_info(self): + """Return the TarInfo's attributes as a dictionary. + """ + info = { + "name": self.name, + "mode": self.mode & 0o7777, + "uid": self.uid, + "gid": self.gid, + "size": self.size, + "mtime": self.mtime, + "chksum": self.chksum, + "type": self.type, + "linkname": self.linkname, + "uname": self.uname, + "gname": self.gname, + "devmajor": self.devmajor, + "devminor": self.devminor + } + + if info["type"] == DIRTYPE and not info["name"].endswith("/"): + info["name"] += "/" + + return info + + def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): + """Return a tar header as a string of 512 byte blocks. + """ + info = self.get_info() + + if format == USTAR_FORMAT: + return self.create_ustar_header(info, encoding, errors) + elif format == GNU_FORMAT: + return self.create_gnu_header(info, encoding, errors) + elif format == PAX_FORMAT: + return self.create_pax_header(info, encoding) + else: + raise ValueError("invalid format") + + def create_ustar_header(self, info, encoding, errors): + """Return the object as a ustar header block. + """ + info["magic"] = POSIX_MAGIC + + if len(info["linkname"]) > LENGTH_LINK: + raise ValueError("linkname is too long") + + if len(info["name"]) > LENGTH_NAME: + info["prefix"], info["name"] = self._posix_split_name(info["name"]) + + return self._create_header(info, USTAR_FORMAT, encoding, errors) + + def create_gnu_header(self, info, encoding, errors): + """Return the object as a GNU header block sequence. 
+ """ + info["magic"] = GNU_MAGIC + + buf = b"" + if len(info["linkname"]) > LENGTH_LINK: + buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) + + if len(info["name"]) > LENGTH_NAME: + buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) + + return buf + self._create_header(info, GNU_FORMAT, encoding, errors) + + def create_pax_header(self, info, encoding): + """Return the object as a ustar header block. If it cannot be + represented this way, prepend a pax extended header sequence + with supplement information. + """ + info["magic"] = POSIX_MAGIC + pax_headers = self.pax_headers.copy() + + # Test string fields for values that exceed the field length or cannot + # be represented in ASCII encoding. + for name, hname, length in ( + ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), + ("uname", "uname", 32), ("gname", "gname", 32)): + + if hname in pax_headers: + # The pax header has priority. + continue + + # Try to encode the string as ASCII. + try: + info[name].encode("ascii", "strict") + except UnicodeEncodeError: + pax_headers[hname] = info[name] + continue + + if len(info[name]) > length: + pax_headers[hname] = info[name] + + # Test number fields for values that exceed the field limit or values + # that like to be stored as float. + for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): + if name in pax_headers: + # The pax header has priority. Avoid overflow. + info[name] = 0 + continue + + val = info[name] + if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): + pax_headers[name] = str(val) + info[name] = 0 + + # Create a pax extended header if necessary. + if pax_headers: + buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) + else: + buf = b"" + + return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") + + @classmethod + def create_pax_global_header(cls, pax_headers): + """Return the object as a pax global header block sequence. + """ + return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") + + def _posix_split_name(self, name): + """Split a name longer than 100 chars into a prefix + and a name part. + """ + prefix = name[:LENGTH_PREFIX + 1] + while prefix and prefix[-1] != "/": + prefix = prefix[:-1] + + name = name[len(prefix):] + prefix = prefix[:-1] + + if not prefix or len(name) > LENGTH_NAME: + raise ValueError("name is too long") + return prefix, name + + @staticmethod + def _create_header(info, format, encoding, errors): + """Return a header block. info is a dictionary with file + information, format must be one of the *_FORMAT constants. 
+ """ + parts = [ + stn(info.get("name", ""), 100, encoding, errors), + itn(info.get("mode", 0) & 0o7777, 8, format), + itn(info.get("uid", 0), 8, format), + itn(info.get("gid", 0), 8, format), + itn(info.get("size", 0), 12, format), + itn(info.get("mtime", 0), 12, format), + b" ", # checksum field + info.get("type", REGTYPE), + stn(info.get("linkname", ""), 100, encoding, errors), + info.get("magic", POSIX_MAGIC), + stn(info.get("uname", ""), 32, encoding, errors), + stn(info.get("gname", ""), 32, encoding, errors), + itn(info.get("devmajor", 0), 8, format), + itn(info.get("devminor", 0), 8, format), + stn(info.get("prefix", ""), 155, encoding, errors) + ] + + buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) + chksum = calc_chksums(buf[-BLOCKSIZE:])[0] + buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] + return buf + + @staticmethod + def _create_payload(payload): + """Return the string payload filled with zero bytes + up to the next 512 byte border. + """ + blocks, remainder = divmod(len(payload), BLOCKSIZE) + if remainder > 0: + payload += (BLOCKSIZE - remainder) * NUL + return payload + + @classmethod + def _create_gnu_long_header(cls, name, type, encoding, errors): + """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence + for name. + """ + name = name.encode(encoding, errors) + NUL + + info = {} + info["name"] = "././@LongLink" + info["type"] = type + info["size"] = len(name) + info["magic"] = GNU_MAGIC + + # create extended header + name blocks. + return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ + cls._create_payload(name) + + @classmethod + def _create_pax_generic_header(cls, pax_headers, type, encoding): + """Return a POSIX.1-2008 extended or global header sequence + that contains a list of keyword, value pairs. The values + must be strings. + """ + # Check if one of the fields contains surrogate characters and thereby + # forces hdrcharset=BINARY, see _proc_pax() for more information. + binary = False + for keyword, value in pax_headers.items(): + try: + value.encode("utf8", "strict") + except UnicodeEncodeError: + binary = True + break + + records = b"" + if binary: + # Put the hdrcharset field at the beginning of the header. + records += b"21 hdrcharset=BINARY\n" + + for keyword, value in pax_headers.items(): + keyword = keyword.encode("utf8") + if binary: + # Try to restore the original byte representation of `value'. + # Needless to say, that the encoding must match the string. + value = value.encode(encoding, "surrogateescape") + else: + value = value.encode("utf8") + + l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' + n = p = 0 + while True: + n = l + len(str(p)) + if n == p: + break + p = n + records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" + + # We use a hardcoded "././@PaxHeader" name like star does + # instead of the one that POSIX recommends. + info = {} + info["name"] = "././@PaxHeader" + info["type"] = type + info["size"] = len(records) + info["magic"] = POSIX_MAGIC + + # Create pax header + record blocks. + return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ + cls._create_payload(records) + + @classmethod + def frombuf(cls, buf, encoding, errors): + """Construct a TarInfo object from a 512 byte bytes object. 
+        """
+        if len(buf) == 0:
+            raise EmptyHeaderError("empty header")
+        if len(buf) != BLOCKSIZE:
+            raise TruncatedHeaderError("truncated header")
+        if buf.count(NUL) == BLOCKSIZE:
+            raise EOFHeaderError("end of file header")
+
+        chksum = nti(buf[148:156])
+        if chksum not in calc_chksums(buf):
+            raise InvalidHeaderError("bad checksum")
+
+        obj = cls()
+        obj.name = nts(buf[0:100], encoding, errors)
+        obj.mode = nti(buf[100:108])
+        obj.uid = nti(buf[108:116])
+        obj.gid = nti(buf[116:124])
+        obj.size = nti(buf[124:136])
+        obj.mtime = nti(buf[136:148])
+        obj.chksum = chksum
+        obj.type = buf[156:157]
+        obj.linkname = nts(buf[157:257], encoding, errors)
+        obj.uname = nts(buf[265:297], encoding, errors)
+        obj.gname = nts(buf[297:329], encoding, errors)
+        obj.devmajor = nti(buf[329:337])
+        obj.devminor = nti(buf[337:345])
+        prefix = nts(buf[345:500], encoding, errors)
+
+        # Old V7 tar format represents a directory as a regular
+        # file with a trailing slash.
+        if obj.type == AREGTYPE and obj.name.endswith("/"):
+            obj.type = DIRTYPE
+
+        # The old GNU sparse format occupies some of the unused
+        # space in the buffer for up to 4 sparse structures.
+        # Save them for later processing in _proc_sparse().
+        if obj.type == GNUTYPE_SPARSE:
+            pos = 386
+            structs = []
+            for i in range(4):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[482])
+            origsize = nti(buf[483:495])
+            obj._sparse_structs = (structs, isextended, origsize)
+
+        # Remove redundant slashes from directories.
+        if obj.isdir():
+            obj.name = obj.name.rstrip("/")
+
+        # Reconstruct a ustar longname.
+        if prefix and obj.type not in GNU_TYPES:
+            obj.name = prefix + "/" + obj.name
+        return obj
+
+    @classmethod
+    def fromtarfile(cls, tarfile):
+        """Return the next TarInfo object from TarFile object
+           tarfile.
+        """
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+        return obj._proc_member(tarfile)
+
+    #--------------------------------------------------------------------------
+    # The following are methods that are called depending on the type of a
+    # member. The entry point is _proc_member() which can be overridden in a
+    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+    # implement the following operations:
+    # 1. Set self.offset_data to the position where the data blocks begin,
+    #    if there is data that follows.
+    # 2. Set tarfile.offset to the position where the next member's header will
+    #    begin.
+    # 3. Return self or another valid TarInfo object.
+    def _proc_member(self, tarfile):
+        """Choose the right processing method depending on
+           the type and call it.
+        """
+        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+            return self._proc_gnulong(tarfile)
+        elif self.type == GNUTYPE_SPARSE:
+            return self._proc_sparse(tarfile)
+        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+            return self._proc_pax(tarfile)
+        else:
+            return self._proc_builtin(tarfile)
+
+    def _proc_builtin(self, tarfile):
+        """Process a builtin type or an unknown type which
+           will be treated as a regular file.
+        """
+        self.offset_data = tarfile.fileobj.tell()
+        offset = self.offset_data
+        if self.isreg() or self.type not in SUPPORTED_TYPES:
+            # Skip the following data blocks.
+            offset += self._block(self.size)
+        tarfile.offset = offset
+
+        # Patch the TarInfo object with saved global
+        # header information.
+ self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) + + return self + + def _proc_gnulong(self, tarfile): + """Process the blocks that hold a GNU longname + or longlink member. + """ + buf = tarfile.fileobj.read(self._block(self.size)) + + # Fetch the next header and process it. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Patch the TarInfo object from the next header with + # the longname information. + next.offset = self.offset + if self.type == GNUTYPE_LONGNAME: + next.name = nts(buf, tarfile.encoding, tarfile.errors) + elif self.type == GNUTYPE_LONGLINK: + next.linkname = nts(buf, tarfile.encoding, tarfile.errors) + + return next + + def _proc_sparse(self, tarfile): + """Process a GNU sparse header plus extra headers. + """ + # We already collected some sparse structures in frombuf(). + structs, isextended, origsize = self._sparse_structs + del self._sparse_structs + + # Collect sparse structures from extended header blocks. + while isextended: + buf = tarfile.fileobj.read(BLOCKSIZE) + pos = 0 + for i in range(21): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset and numbytes: + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[504]) + self.sparse = structs + + self.offset_data = tarfile.fileobj.tell() + tarfile.offset = self.offset_data + self._block(self.size) + self.size = origsize + return self + + def _proc_pax(self, tarfile): + """Process an extended or global header as described in + POSIX.1-2008. + """ + # Read the header information. + buf = tarfile.fileobj.read(self._block(self.size)) + + # A pax header stores supplemental information for either + # the following file (extended) or all following files + # (global). + if self.type == XGLTYPE: + pax_headers = tarfile.pax_headers + else: + pax_headers = tarfile.pax_headers.copy() + + # Check if the pax header contains a hdrcharset field. This tells us + # the encoding of the path, linkpath, uname and gname fields. Normally, + # these fields are UTF-8 encoded but since POSIX.1-2008 tar + # implementations are allowed to store them as raw binary strings if + # the translation to UTF-8 fails. + match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) + if match is not None: + pax_headers["hdrcharset"] = match.group(1).decode("utf8") + + # For the time being, we don't care about anything other than "BINARY". + # The only other value that is currently allowed by the standard is + # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. + hdrcharset = pax_headers.get("hdrcharset") + if hdrcharset == "BINARY": + encoding = tarfile.encoding + else: + encoding = "utf8" + + # Parse pax header information. A record looks like that: + # "%d %s=%s\n" % (length, keyword, value). length is the size + # of the complete record including the length field itself and + # the newline. keyword and value are both UTF-8 encoded strings. + regex = re.compile(br"(\d+) ([^=]+)=") + pos = 0 + while True: + match = regex.match(buf, pos) + if not match: + break + + length, keyword = match.groups() + length = int(length) + value = buf[match.end(2) + 1:match.start(1) + length - 1] + + # Normally, we could just use "utf8" as the encoding and "strict" + # as the error handler, but we better not take the risk. 
For + # example, GNU tar <= 1.23 is known to store filenames it cannot + # translate to UTF-8 as raw strings (unfortunately without a + # hdrcharset=BINARY header). + # We first try the strict standard encoding, and if that fails we + # fall back on the user's encoding and error handler. + keyword = self._decode_pax_field(keyword, "utf8", "utf8", + tarfile.errors) + if keyword in PAX_NAME_FIELDS: + value = self._decode_pax_field(value, encoding, tarfile.encoding, + tarfile.errors) + else: + value = self._decode_pax_field(value, "utf8", "utf8", + tarfile.errors) + + pax_headers[keyword] = value + pos += length + + # Fetch the next header. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Process GNU sparse information. + if "GNU.sparse.map" in pax_headers: + # GNU extended sparse format version 0.1. + self._proc_gnusparse_01(next, pax_headers) + + elif "GNU.sparse.size" in pax_headers: + # GNU extended sparse format version 0.0. + self._proc_gnusparse_00(next, pax_headers, buf) + + elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": + # GNU extended sparse format version 1.0. + self._proc_gnusparse_10(next, pax_headers, tarfile) + + if self.type in (XHDTYPE, SOLARIS_XHDTYPE): + # Patch the TarInfo object with the extended header info. + next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) + next.offset = self.offset + + if "size" in pax_headers: + # If the extended header replaces the size field, + # we need to recalculate the offset where the next + # header starts. + offset = next.offset_data + if next.isreg() or next.type not in SUPPORTED_TYPES: + offset += next._block(next.size) + tarfile.offset = offset + + return next + + def _proc_gnusparse_00(self, next, pax_headers, buf): + """Process a GNU tar extended sparse header, version 0.0. + """ + offsets = [] + for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): + offsets.append(int(match.group(1))) + numbytes = [] + for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): + numbytes.append(int(match.group(1))) + next.sparse = list(zip(offsets, numbytes)) + + def _proc_gnusparse_01(self, next, pax_headers): + """Process a GNU tar extended sparse header, version 0.1. + """ + sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _proc_gnusparse_10(self, next, pax_headers, tarfile): + """Process a GNU tar extended sparse header, version 1.0. + """ + fields = None + sparse = [] + buf = tarfile.fileobj.read(BLOCKSIZE) + fields, buf = buf.split(b"\n", 1) + fields = int(fields) + while len(sparse) < fields * 2: + if b"\n" not in buf: + buf += tarfile.fileobj.read(BLOCKSIZE) + number, buf = buf.split(b"\n", 1) + sparse.append(int(number)) + next.offset_data = tarfile.fileobj.tell() + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _apply_pax_info(self, pax_headers, encoding, errors): + """Replace fields with supplemental information from a previous + pax extended or global header. 
+ """ + for keyword, value in pax_headers.items(): + if keyword == "GNU.sparse.name": + setattr(self, "path", value) + elif keyword == "GNU.sparse.size": + setattr(self, "size", int(value)) + elif keyword == "GNU.sparse.realsize": + setattr(self, "size", int(value)) + elif keyword in PAX_FIELDS: + if keyword in PAX_NUMBER_FIELDS: + try: + value = PAX_NUMBER_FIELDS[keyword](value) + except ValueError: + value = 0 + if keyword == "path": + value = value.rstrip("/") + setattr(self, keyword, value) + + self.pax_headers = pax_headers.copy() + + def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): + """Decode a single field from a pax record. + """ + try: + return value.decode(encoding, "strict") + except UnicodeDecodeError: + return value.decode(fallback_encoding, fallback_errors) + + def _block(self, count): + """Round up a byte count by BLOCKSIZE and return it, + e.g. _block(834) => 1024. + """ + blocks, remainder = divmod(count, BLOCKSIZE) + if remainder: + blocks += 1 + return blocks * BLOCKSIZE + + def isreg(self): + return self.type in REGULAR_TYPES + def isfile(self): + return self.isreg() + def isdir(self): + return self.type == DIRTYPE + def issym(self): + return self.type == SYMTYPE + def islnk(self): + return self.type == LNKTYPE + def ischr(self): + return self.type == CHRTYPE + def isblk(self): + return self.type == BLKTYPE + def isfifo(self): + return self.type == FIFOTYPE + def issparse(self): + return self.sparse is not None + def isdev(self): + return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) +# class TarInfo + +class TarFile(object): + """The TarFile Class provides an interface to tar archives. + """ + + debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) + + dereference = False # If true, add content of linked file to the + # tar file, else the link. + + ignore_zeros = False # If true, skips empty or invalid blocks and + # continues processing. + + errorlevel = 1 # If 0, fatal errors only appear in debug + # messages (if debug >= 0). If > 0, errors + # are passed to the caller as exceptions. + + format = DEFAULT_FORMAT # The format to use when creating an archive. + + encoding = ENCODING # Encoding for 8-bit character strings. + + errors = None # Error handler for unicode conversion. + + tarinfo = TarInfo # The default TarInfo class to use. + + fileobject = ExFileObject # The default ExFileObject class to use. + + def __init__(self, name=None, mode="r", fileobj=None, format=None, + tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, + errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): + """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + self.mode = mode + self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] + + if not fileobj: + if self.mode == "a" and not os.path.exists(name): + # Create nonexistent files in append mode. 
+ self.mode = "w" + self._mode = "wb" + fileobj = bltn_open(name, self._mode) + self._extfileobj = False + else: + if name is None and hasattr(fileobj, "name"): + name = fileobj.name + if hasattr(fileobj, "mode"): + self._mode = fileobj.mode + self._extfileobj = True + self.name = os.path.abspath(name) if name else None + self.fileobj = fileobj + + # Init attributes. + if format is not None: + self.format = format + if tarinfo is not None: + self.tarinfo = tarinfo + if dereference is not None: + self.dereference = dereference + if ignore_zeros is not None: + self.ignore_zeros = ignore_zeros + if encoding is not None: + self.encoding = encoding + self.errors = errors + + if pax_headers is not None and self.format == PAX_FORMAT: + self.pax_headers = pax_headers + else: + self.pax_headers = {} + + if debug is not None: + self.debug = debug + if errorlevel is not None: + self.errorlevel = errorlevel + + # Init datastructures. + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = self.fileobj.tell() + # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added + + try: + if self.mode == "r": + self.firstmember = None + self.firstmember = self.next() + + if self.mode == "a": + # Move to the end of the archive, + # before the first empty block. + while True: + self.fileobj.seek(self.offset) + try: + tarinfo = self.tarinfo.fromtarfile(self) + self.members.append(tarinfo) + except EOFHeaderError: + self.fileobj.seek(self.offset) + break + except HeaderError as e: + raise ReadError(str(e)) + + if self.mode in "aw": + self._loaded = True + + if self.pax_headers: + buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) + self.fileobj.write(buf) + self.offset += len(buf) + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + #-------------------------------------------------------------------------- + # Below are the classmethods which act as alternate constructors to the + # TarFile class. The open() method is the only one that is needed for + # public use; it is the "super"-constructor and is able to select an + # adequate "sub"-constructor for a particular compression using the mapping + # from OPEN_METH. + # + # This concept allows one to subclass TarFile without losing the comfort of + # the super-constructor. A sub-constructor is registered and made available + # by adding it to the mapping in OPEN_METH. + + @classmethod + def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): + """Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. 
+ + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + """ + + if not name and not fileobj: + raise ValueError("nothing to open") + + if mode in ("r", "r:*"): + # Find out which *open() is appropriate for opening the file. + for comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + if fileobj is not None: + saved_pos = fileobj.tell() + try: + return func(name, "r", fileobj, **kwargs) + except (ReadError, CompressionError) as e: + if fileobj is not None: + fileobj.seek(saved_pos) + continue + raise ReadError("file could not be opened successfully") + + elif ":" in mode: + filemode, comptype = mode.split(":", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + # Select the *open() function according to + # given compression. + if comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + else: + raise CompressionError("unknown compression type %r" % comptype) + return func(name, filemode, fileobj, **kwargs) + + elif "|" in mode: + filemode, comptype = mode.split("|", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + if filemode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + stream = _Stream(name, filemode, comptype, fileobj, bufsize) + try: + t = cls(name, filemode, stream, **kwargs) + except: + stream.close() + raise + t._extfileobj = False + return t + + elif mode in "aw": + return cls.taropen(name, mode, fileobj, **kwargs) + + raise ValueError("undiscernible mode") + + @classmethod + def taropen(cls, name, mode="r", fileobj=None, **kwargs): + """Open uncompressed tar archive name for reading or writing. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + return cls(name, mode, fileobj, **kwargs) + + @classmethod + def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open gzip compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + try: + import gzip + gzip.GzipFile + except (ImportError, AttributeError): + raise CompressionError("gzip module is not available") + + extfileobj = fileobj is not None + try: + fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) + t = cls.taropen(name, mode, fileobj, **kwargs) + except IOError: + if not extfileobj and fileobj is not None: + fileobj.close() + if fileobj is None: + raise + raise ReadError("not a gzip file") + except: + if not extfileobj and fileobj is not None: + fileobj.close() + raise + t._extfileobj = extfileobj + return t + + @classmethod + def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open bzip2 compressed tar archive name for reading or writing. 
+ Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'.") + + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + + if fileobj is not None: + fileobj = _BZ2Proxy(fileobj, mode) + else: + fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except (IOError, EOFError): + fileobj.close() + raise ReadError("not a bzip2 file") + t._extfileobj = False + return t + + # All *open() methods are registered here. + OPEN_METH = { + "tar": "taropen", # uncompressed tar + "gz": "gzopen", # gzip compressed tar + "bz2": "bz2open" # bzip2 compressed tar + } + + #-------------------------------------------------------------------------- + # The public methods which TarFile provides: + + def close(self): + """Close the TarFile. In write-mode, two finishing zero blocks are + appended to the archive. + """ + if self.closed: + return + + if self.mode in "aw": + self.fileobj.write(NUL * (BLOCKSIZE * 2)) + self.offset += (BLOCKSIZE * 2) + # fill up the end with zero-blocks + # (like option -b20 for tar does) + blocks, remainder = divmod(self.offset, RECORDSIZE) + if remainder > 0: + self.fileobj.write(NUL * (RECORDSIZE - remainder)) + + if not self._extfileobj: + self.fileobj.close() + self.closed = True + + def getmember(self, name): + """Return a TarInfo object for member `name'. If `name' can not be + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurrence is assumed to be the + most up-to-date version. + """ + tarinfo = self._getmember(name) + if tarinfo is None: + raise KeyError("filename %r not found" % name) + return tarinfo + + def getmembers(self): + """Return the members of the archive as a list of TarInfo objects. The + list has the same order as the members in the archive. + """ + self._check() + if not self._loaded: # if we want to obtain a list of + self._load() # all members, we first have to + # scan the whole archive. + return self.members + + def getnames(self): + """Return the members of the archive as a list of their names. It has + the same order as the list returned by getmembers(). + """ + return [tarinfo.name for tarinfo in self.getmembers()] + + def gettarinfo(self, name=None, arcname=None, fileobj=None): + """Create a TarInfo object for either the file `name' or the file + object `fileobj' (using os.fstat on its file descriptor). You can + modify some of the TarInfo's attributes before you add it using + addfile(). If given, `arcname' specifies an alternative name for the + file in the archive. + """ + self._check("aw") + + # When fileobj is given, replace name by + # fileobj's real name. + if fileobj is not None: + name = fileobj.name + + # Building the name of the member in the archive. + # Backward slashes are converted to forward slashes, + # Absolute paths are turned to relative paths. + if arcname is None: + arcname = name + drv, arcname = os.path.splitdrive(arcname) + arcname = arcname.replace(os.sep, "/") + arcname = arcname.lstrip("/") + + # Now, fill the TarInfo object with + # information specific for the file. + tarinfo = self.tarinfo() + tarinfo.tarfile = self + + # Use os.stat or os.lstat, depending on platform + # and if symlinks shall be resolved. 
+        if fileobj is None:
+            if hasattr(os, "lstat") and not self.dereference:
+                statres = os.lstat(name)
+            else:
+                statres = os.stat(name)
+        else:
+            statres = os.fstat(fileobj.fileno())
+        linkname = ""
+
+        stmd = statres.st_mode
+        if stat.S_ISREG(stmd):
+            inode = (statres.st_ino, statres.st_dev)
+            if not self.dereference and statres.st_nlink > 1 and \
+                    inode in self.inodes and arcname != self.inodes[inode]:
+                # Is it a hardlink to an already
+                # archived file?
+                type = LNKTYPE
+                linkname = self.inodes[inode]
+            else:
+                # The inode is added only if it is valid.
+                # For win32 it is always 0.
+                type = REGTYPE
+                if inode[0]:
+                    self.inodes[inode] = arcname
+        elif stat.S_ISDIR(stmd):
+            type = DIRTYPE
+        elif stat.S_ISFIFO(stmd):
+            type = FIFOTYPE
+        elif stat.S_ISLNK(stmd):
+            type = SYMTYPE
+            linkname = os.readlink(name)
+        elif stat.S_ISCHR(stmd):
+            type = CHRTYPE
+        elif stat.S_ISBLK(stmd):
+            type = BLKTYPE
+        else:
+            return None
+
+        # Fill the TarInfo object with all
+        # information we can get.
+        tarinfo.name = arcname
+        tarinfo.mode = stmd
+        tarinfo.uid = statres.st_uid
+        tarinfo.gid = statres.st_gid
+        if type == REGTYPE:
+            tarinfo.size = statres.st_size
+        else:
+            tarinfo.size = 0
+        tarinfo.mtime = statres.st_mtime
+        tarinfo.type = type
+        tarinfo.linkname = linkname
+        if pwd:
+            try:
+                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+            except KeyError:
+                pass
+        if grp:
+            try:
+                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+            except KeyError:
+                pass
+
+        if type in (CHRTYPE, BLKTYPE):
+            if hasattr(os, "major") and hasattr(os, "minor"):
+                tarinfo.devmajor = os.major(statres.st_rdev)
+                tarinfo.devminor = os.minor(statres.st_rdev)
+        return tarinfo
+
+    def list(self, verbose=True):
+        """Print a table of contents to sys.stdout. If `verbose' is False, only
+           the names of the members are printed. If it is True, an `ls -l'-like
+           output is produced.
+        """
+        self._check()
+
+        for tarinfo in self:
+            if verbose:
+                print(filemode(tarinfo.mode), end=' ')
+                print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+                                 tarinfo.gname or tarinfo.gid), end=' ')
+                if tarinfo.ischr() or tarinfo.isblk():
+                    print("%10s" % ("%d,%d" \
+                                    % (tarinfo.devmajor, tarinfo.devminor)), end=' ')
+                else:
+                    print("%10d" % tarinfo.size, end=' ')
+                print("%d-%02d-%02d %02d:%02d:%02d" \
+                      % time.localtime(tarinfo.mtime)[:6], end=' ')
+
+            print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')
+
+            if verbose:
+                if tarinfo.issym():
+                    print("->", tarinfo.linkname, end=' ')
+                if tarinfo.islnk():
+                    print("link to", tarinfo.linkname, end=' ')
+            print()
+
+    def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
+        """Add the file `name' to the archive. `name' may be any type of file
+           (directory, fifo, symbolic link, etc.). If given, `arcname'
+           specifies an alternative name for the file in the archive.
+           Directories are added recursively by default. This can be avoided by
+           setting `recursive' to False. `exclude' is a function that should
+           return True for each filename to be excluded. `filter' is a function
+           that expects a TarInfo object argument and returns the changed
+           TarInfo object; if it returns None, the TarInfo object will be
+           excluded from the archive.
+        """
+        self._check("aw")
+
+        if arcname is None:
+            arcname = name
+
+        # Exclude pathnames.
+        if exclude is not None:
+            import warnings
+            warnings.warn("use the filter argument instead",
+                          DeprecationWarning, 2)
+            if exclude(name):
+                self._dbg(2, "tarfile: Excluded %r" % name)
+                return
+
+        # Skip if somebody tries to archive the archive...
+ if self.name is not None and os.path.abspath(name) == self.name: + self._dbg(2, "tarfile: Skipped %r" % name) + return + + self._dbg(1, name) + + # Create a TarInfo object from the file. + tarinfo = self.gettarinfo(name, arcname) + + if tarinfo is None: + self._dbg(1, "tarfile: Unsupported type %r" % name) + return + + # Change or exclude the TarInfo object. + if filter is not None: + tarinfo = filter(tarinfo) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Append the tar header and data to the archive. + if tarinfo.isreg(): + f = bltn_open(name, "rb") + self.addfile(tarinfo, f) + f.close() + + elif tarinfo.isdir(): + self.addfile(tarinfo) + if recursive: + for f in os.listdir(name): + self.add(os.path.join(name, f), os.path.join(arcname, f), + recursive, exclude, filter=filter) + + else: + self.addfile(tarinfo) + + def addfile(self, tarinfo, fileobj=None): + """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is + given, tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects using gettarinfo(). + On Windows platforms, `fileobj' should always be opened with mode + 'rb' to avoid irritation about the file size. + """ + self._check("aw") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(self.format, self.encoding, self.errors) + self.fileobj.write(buf) + self.offset += len(buf) + + # If there's data to follow, append it. + if fileobj is not None: + copyfileobj(fileobj, self.fileobj, tarinfo.size) + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + self.fileobj.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + self.offset += blocks * BLOCKSIZE + + self.members.append(tarinfo) + + def extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 0o700 + # Do not set_attrs directories, as we will do that further down + self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) + + # Reverse sort directories. + directories.sort(key=lambda a: a.name) + directories.reverse() + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extract(self, member, path="", set_attrs=True): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a TarInfo object. You can + specify a different directory using `path'. File attributes (owner, + mtime, mode) are set unless `set_attrs' is False. + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + # Prepare the link target for makelink(). 
+ if tarinfo.islnk(): + tarinfo._link_target = os.path.join(path, tarinfo.linkname) + + try: + self._extract_member(tarinfo, os.path.join(path, tarinfo.name), + set_attrs=set_attrs) + except EnvironmentError as e: + if self.errorlevel > 0: + raise + else: + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extractfile(self, member): + """Extract a member from the archive as a file object. `member' may be + a filename or a TarInfo object. If `member' is a regular file, a + file-like object is returned. If `member' is a link, a file-like + object is constructed from the link's target. If `member' is none of + the above, None is returned. + The file-like object is read-only and provides the following + methods: read(), readline(), readlines(), seek() and tell() + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + if tarinfo.isreg(): + return self.fileobject(self, tarinfo) + + elif tarinfo.type not in SUPPORTED_TYPES: + # If a member's type is unknown, it is treated as a + # regular file. + return self.fileobject(self, tarinfo) + + elif tarinfo.islnk() or tarinfo.issym(): + if isinstance(self.fileobj, _Stream): + # A small but ugly workaround for the case that someone tries + # to extract a (sym)link as a file-object from a non-seekable + # stream of tar blocks. + raise StreamError("cannot extract (sym)link as file object") + else: + # A (sym)link's file object is its target's file object. + return self.extractfile(self._find_link_target(tarinfo)) + else: + # If there's no data associated with the member (directory, chrdev, + # blkdev, etc.), return None instead of a file object. + return None + + def _extract_member(self, tarinfo, targetpath, set_attrs=True): + """Extract the TarInfo object tarinfo to a physical + file called targetpath. + """ + # Fetch the TarInfo object for the given name + # and build the destination pathname, replacing + # forward slashes to platform specific separators. + targetpath = targetpath.rstrip("/") + targetpath = targetpath.replace("/", os.sep) + + # Create all upper directories. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + # Create directories that are not part of the archive with + # default permissions. + os.makedirs(upperdirs) + + if tarinfo.islnk() or tarinfo.issym(): + self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) + else: + self._dbg(1, tarinfo.name) + + if tarinfo.isreg(): + self.makefile(tarinfo, targetpath) + elif tarinfo.isdir(): + self.makedir(tarinfo, targetpath) + elif tarinfo.isfifo(): + self.makefifo(tarinfo, targetpath) + elif tarinfo.ischr() or tarinfo.isblk(): + self.makedev(tarinfo, targetpath) + elif tarinfo.islnk() or tarinfo.issym(): + self.makelink(tarinfo, targetpath) + elif tarinfo.type not in SUPPORTED_TYPES: + self.makeunknown(tarinfo, targetpath) + else: + self.makefile(tarinfo, targetpath) + + if set_attrs: + self.chown(tarinfo, targetpath) + if not tarinfo.issym(): + self.chmod(tarinfo, targetpath) + self.utime(tarinfo, targetpath) + + #-------------------------------------------------------------------------- + # Below are the different file methods. They are called via + # _extract_member() when extract() is called. They can be replaced in a + # subclass to implement other functionality. 
+ + def makedir(self, tarinfo, targetpath): + """Make a directory called targetpath. + """ + try: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) + except EnvironmentError as e: + if e.errno != errno.EEXIST: + raise + + def makefile(self, tarinfo, targetpath): + """Make a file called targetpath. + """ + source = self.fileobj + source.seek(tarinfo.offset_data) + target = bltn_open(targetpath, "wb") + if tarinfo.sparse is not None: + for offset, size in tarinfo.sparse: + target.seek(offset) + copyfileobj(source, target, size) + else: + copyfileobj(source, target, tarinfo.size) + target.seek(tarinfo.size) + target.truncate() + target.close() + + def makeunknown(self, tarinfo, targetpath): + """Make a file from a TarInfo object with an unknown type + at targetpath. + """ + self.makefile(tarinfo, targetpath) + self._dbg(1, "tarfile: Unknown file type %r, " \ + "extracted as regular file." % tarinfo.type) + + def makefifo(self, tarinfo, targetpath): + """Make a fifo called targetpath. + """ + if hasattr(os, "mkfifo"): + os.mkfifo(targetpath) + else: + raise ExtractError("fifo not supported by system") + + def makedev(self, tarinfo, targetpath): + """Make a character or block device called targetpath. + """ + if not hasattr(os, "mknod") or not hasattr(os, "makedev"): + raise ExtractError("special devices not supported by system") + + mode = tarinfo.mode + if tarinfo.isblk(): + mode |= stat.S_IFBLK + else: + mode |= stat.S_IFCHR + + os.mknod(targetpath, mode, + os.makedev(tarinfo.devmajor, tarinfo.devminor)) + + def makelink(self, tarinfo, targetpath): + """Make a (symbolic) link called targetpath. If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ + try: + # For systems that support symbolic and hard links. + if tarinfo.issym(): + os.symlink(tarinfo.linkname, targetpath) + else: + # See extract(). + if os.path.exists(tarinfo._link_target): + os.link(tarinfo._link_target, targetpath) + else: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except symlink_exception: + if tarinfo.issym(): + linkpath = os.path.join(os.path.dirname(tarinfo.name), + tarinfo.linkname) + else: + linkpath = tarinfo.linkname + else: + try: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except KeyError: + raise ExtractError("unable to resolve link inside archive") + + def chown(self, tarinfo, targetpath): + """Set owner of targetpath according to tarinfo. + """ + if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: + # We have to be root to do so. + try: + g = grp.getgrnam(tarinfo.gname)[2] + except KeyError: + g = tarinfo.gid + try: + u = pwd.getpwnam(tarinfo.uname)[2] + except KeyError: + u = tarinfo.uid + try: + if tarinfo.issym() and hasattr(os, "lchown"): + os.lchown(targetpath, u, g) + else: + if sys.platform != "os2emx": + os.chown(targetpath, u, g) + except EnvironmentError as e: + raise ExtractError("could not change owner") + + def chmod(self, tarinfo, targetpath): + """Set file permissions of targetpath according to tarinfo. + """ + if hasattr(os, 'chmod'): + try: + os.chmod(targetpath, tarinfo.mode) + except EnvironmentError as e: + raise ExtractError("could not change mode") + + def utime(self, tarinfo, targetpath): + """Set modification time of targetpath according to tarinfo. 
+ """ + if not hasattr(os, 'utime'): + return + try: + os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + except EnvironmentError as e: + raise ExtractError("could not change modification time") + + #-------------------------------------------------------------------------- + def next(self): + """Return the next member of the archive as a TarInfo object, when + TarFile is opened for reading. Return None if there is no more + available. + """ + self._check("ra") + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Read the next block. + self.fileobj.seek(self.offset) + tarinfo = None + while True: + try: + tarinfo = self.tarinfo.fromtarfile(self) + except EOFHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + except InvalidHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + elif self.offset == 0: + raise ReadError(str(e)) + except EmptyHeaderError: + if self.offset == 0: + raise ReadError("empty file") + except TruncatedHeaderError as e: + if self.offset == 0: + raise ReadError(str(e)) + except SubsequentHeaderError as e: + raise ReadError(str(e)) + break + + if tarinfo is not None: + self.members.append(tarinfo) + else: + self._loaded = True + + return tarinfo + + #-------------------------------------------------------------------------- + # Little helper methods: + + def _getmember(self, name, tarinfo=None, normalize=False): + """Find an archive member by name from bottom to top. + If tarinfo is given, it is used as the starting point. + """ + # Ensure that all members have been loaded. + members = self.getmembers() + + # Limit the member search list up to tarinfo. + if tarinfo is not None: + members = members[:members.index(tarinfo)] + + if normalize: + name = os.path.normpath(name) + + for member in reversed(members): + if normalize: + member_name = os.path.normpath(member.name) + else: + member_name = member.name + + if name == member_name: + return member + + def _load(self): + """Read through the entire archive file and look for readable + members. + """ + while True: + tarinfo = self.next() + if tarinfo is None: + break + self._loaded = True + + def _check(self, mode=None): + """Check if TarFile is still open, and if the operation's mode + corresponds to TarFile's mode. + """ + if self.closed: + raise IOError("%s is closed" % self.__class__.__name__) + if mode is not None and self.mode not in mode: + raise IOError("bad operation for mode %r" % self.mode) + + def _find_link_target(self, tarinfo): + """Find the target member of a symlink or hardlink member in the + archive. + """ + if tarinfo.issym(): + # Always search the entire archive. + linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname + limit = None + else: + # Search the archive before the link, because a hard link is + # just a reference to an already archived file. + linkname = tarinfo.linkname + limit = tarinfo + + member = self._getmember(linkname, tarinfo=limit, normalize=True) + if member is None: + raise KeyError("linkname %r not found" % linkname) + return member + + def __iter__(self): + """Provide an iterator object. + """ + if self._loaded: + return iter(self.members) + else: + return TarIter(self) + + def _dbg(self, level, msg): + """Write debugging output to sys.stderr. 
+ """ + if level <= self.debug: + print(msg, file=sys.stderr) + + def __enter__(self): + self._check() + return self + + def __exit__(self, type, value, traceback): + if type is None: + self.close() + else: + # An exception occurred. We must not call close() because + # it would try to write end-of-archive blocks and padding. + if not self._extfileobj: + self.fileobj.close() + self.closed = True +# class TarFile + +class TarIter(object): + """Iterator Class. + + for tarinfo in TarFile(...): + suite... + """ + + def __init__(self, tarfile): + """Construct a TarIter object. + """ + self.tarfile = tarfile + self.index = 0 + def __iter__(self): + """Return iterator object. + """ + return self + + def __next__(self): + """Return the next item using TarFile's next() method. + When all members have been read, set TarFile as _loaded. + """ + # Fix for SF #1100429: Under rare circumstances it can + # happen that getmembers() is called during iteration, + # which will cause TarIter to stop prematurely. + if not self.tarfile._loaded: + tarinfo = self.tarfile.next() + if not tarinfo: + self.tarfile._loaded = True + raise StopIteration + else: + try: + tarinfo = self.tarfile.members[self.index] + except IndexError: + raise StopIteration + self.index += 1 + return tarinfo + + next = __next__ # for Python 2.x + +#-------------------- +# exported functions +#-------------------- +def is_tarfile(name): + """Return True if name points to a tar archive that we + are able to handle, else return False. + """ + try: + t = open(name) + t.close() + return True + except TarError: + return False + +bltn_open = open +open = TarFile.open diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/compat.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/compat.py new file mode 100644 index 0000000..2b198dd --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/compat.py @@ -0,0 +1,1111 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2016 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import absolute_import + +import os +import re +import sys + +try: + import ssl +except ImportError: + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from ._backport import shutil + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + _userprog = None + def splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + global _userprog + if _userprog is None: + import re + _userprog = re.compile('^(.*)@(.*)$') + + match = _userprog.match(host) + if match: return match.group(1, 2) + return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + import shutil + from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, + unquote, urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib.request import HTTPSHandler + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + class CertificateError(ValueError): + pass + + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. 
+ pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. 
./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + +try: + import sysconfig +except ImportError: # pragma: no cover + from ._backport import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + _fsencoding = sys.getfilesystemencoding() + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + import re + + cookie_re = 
re.compile("coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. + line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format(filename, + encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format(filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +# For converting & <-> & etc. 
+try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. + + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' 
+ return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + +try: + from imp import cache_from_source +except ImportError: # pragma: no cover + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover +## {{{ http://code.activestate.com/recipes/576693/ (r9) +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. 
+ dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. + + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. 
+ + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. 
+ + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. + """ + + CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext' : 'ext_convert', + 'cfg' : 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' 
+ frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + #print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + #rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. + """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/database.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/database.py new file mode 100644 index 0000000..c314426 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/database.py @@ -0,0 +1,1312 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2016 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . 
import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) + + +__all__ = ['Distribution', 'BaseInstalledDistribution', + 'InstalledDistribution', 'EggInfoDistribution', + 'DistributionPath'] + + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). + """ + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. + """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. 
+ seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, + env=self) + elif self._include_egg and entry.endswith(('.egg-info', + '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. + """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. + :type version: string + :returns: directory name + :rtype: string""" + name = name.replace('-', '_') + return '-'.join([name, version]) + DISTINFO_EXT + + def get_distributions(self): + """ + Provides an iterator that looks for distributions and returns + :class:`InstalledDistribution` or + :class:`EggInfoDistribution` instances for each one of them. + + :rtype: iterator of :class:`InstalledDistribution` and + :class:`EggInfoDistribution` instances + """ + if not self._cache_enabled: + for dist in self._yield_distributions(): + yield dist + else: + self._generate_cache() + + for dist in self._cache.path.values(): + yield dist + + if self._include_egg: + for dist in self._cache_egg.path.values(): + yield dist + + def get_distribution(self, name): + """ + Looks for a named distribution on the path. + + This function only returns the first result found, as no more than one + value is expected. If nothing is found, ``None`` is returned. 
+ + :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` + or ``None`` + """ + result = None + name = name.lower() + if not self._cache_enabled: + for dist in self._yield_distributions(): + if dist.key == name: + result = dist + break + else: + self._generate_cache() + + if name in self._cache.name: + result = self._cache.name[name][0] + elif self._include_egg and name in self._cache_egg.name: + result = self._cache_egg.name[name][0] + return result + + def provides_distribution(self, name, version=None): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. + + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + matcher = None + if not version is None: + try: + matcher = self._scheme.matcher('%s (%s)' % (name, version)) + except ValueError: + raise DistlibException('invalid name or version: %r, %r' % + (name, version)) + + for dist in self.get_distributions(): + provided = dist.provides + + for p in provided: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: + yield dist + break + else: + if p_name == name and matcher.match(p_ver): + yield dist + break + + def get_file_path(self, name, relative_path): + """ + Return the path to a resource file. + """ + dist = self.get_distribution(name) + if dist is None: + raise LookupError('no distribution named %r found' % name) + return dist.get_resource_path(relative_path) + + def get_exported_entries(self, category, name=None): + """ + Return all of the exported entries in a particular category. + + :param category: The category to search for entries. + :param name: If specified, only entries with that name are returned. + """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. + """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. + """ + self.metadata = metadata + self.name = metadata.name + self.key = self.name.lower() # for case-insensitive comparisons + self.version = metadata.version + self.locator = None + self.digest = None + self.extras = None # additional features requested + self.context = None # environment marker overrides + self.download_urls = set() + self.digests = {} + + @property + def source_url(self): + """ + The source archive download URL for this distribution. 
+ """ + return self.metadata.source_url + + download_url = source_url # Backward compatibility + + @property + def name_and_version(self): + """ + A utility property which displays the name and version in parentheses. + """ + return '%s (%s)' % (self.name, self.version) + + @property + def provides(self): + """ + A set of distribution names and versions provided by this distribution. + :return: A set of "name (version)" strings. + """ + plist = self.metadata.provides + s = '%s (%s)' % (self.name, self.version) + if s not in plist: + plist.append(s) + return plist + + def _get_requirements(self, req_attr): + md = self.metadata + logger.debug('Getting requirements from metadata %r', md.todict()) + reqts = getattr(md, req_attr) + return set(md.get_requirements(reqts, extras=self.extras, + env=self.context)) + + @property + def run_requires(self): + return self._get_requirements('run_requires') + + @property + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') + + @property + def test_requires(self): + return self._get_requirements('test_requires') + + @property + def dev_requires(self): + return self._get_requirements('dev_requires') + + def matches_requirement(self, req): + """ + Say if this instance matches (fulfills) a requirement. + :param req: The requirement to match. + :rtype req: str + :return: True if it matches, else False. + """ + # Requirement may contain extras - parse to lose those + # from what's passed to the matcher + r = parse_requirement(req) + scheme = get_scheme(self.metadata.scheme) + try: + matcher = scheme.matcher(r.requirement) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + result = False + for p in self.provides: + p_name, p_ver = parse_name_and_version(p) + if p_name != name: + continue + try: + result = matcher.match(p_ver) + break + except UnsupportedVersionError: + pass + return result + + def __repr__(self): + """ + Return a textual representation of this instance, + """ + if self.source_url: + suffix = ' [%s]' % self.source_url + else: + suffix = '' + return '' % (self.name, self.version, suffix) + + def __eq__(self, other): + """ + See if this distribution is the same as another. + :param other: The distribution to compare with. To be equal to one + another. distributions must have the same type, name, + version and source_url. + :return: True if it is the same, else False. + """ + if type(other) is not type(self): + result = False + else: + result = (self.name == other.name and + self.version == other.version and + self.source_url == other.source_url) + return result + + def __hash__(self): + """ + Compute hash in a way which matches the equality test. + """ + return hash(self.name) + hash(self.version) + hash(self.source_url) + + +class BaseInstalledDistribution(Distribution): + """ + This is the base class for installed distributions (whether PEP 376 or + legacy). + """ + + hasher = None + + def __init__(self, metadata, path, env=None): + """ + Initialise an instance. + :param metadata: An instance of :class:`Metadata` which describes the + distribution. This will normally have been initialised + from a metadata file in the ``path``. + :param path: The path of the ``.dist-info`` or ``.egg-info`` + directory for the distribution. 
+ :param env: This is normally the :class:`DistributionPath` + instance where this distribution was found. + """ + super(BaseInstalledDistribution, self).__init__(metadata) + self.path = path + self.dist_path = env + + def get_hash(self, data, hasher=None): + """ + Get the hash of some data, using a particular hash algorithm, if + specified. + + :param data: The data to be hashed. + :type data: bytes + :param hasher: The name of a hash implementation, supported by hashlib, + or ``None``. Examples of valid values are ``'sha1'``, + ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and + ``'sha512'``. If no hasher is specified, the ``hasher`` + attribute of the :class:`InstalledDistribution` instance + is used. If the hasher is determined to be ``None``, MD5 + is used as the hashing algorithm. + :returns: The hash of the data. If a hasher was explicitly specified, + the returned hash will be prefixed with the specified hasher + followed by '='. + :rtype: str + """ + if hasher is None: + hasher = self.hasher + if hasher is None: + hasher = hashlib.md5 + prefix = '' + else: + hasher = getattr(hashlib, hasher) + prefix = '%s=' % self.hasher + digest = hasher(data).digest() + digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') + return '%s%s' % (prefix, digest) + + +class InstalledDistribution(BaseInstalledDistribution): + """ + Created with the *path* of the ``.dist-info`` directory provided to the + constructor. It reads the metadata contained in ``pydist.json`` when it is + instantiated., or uses a passed in Metadata instance (useful for when + dry-run mode is being used). + """ + + hasher = 'sha256' + + def __init__(self, path, metadata=None, env=None): + self.finder = finder = resources.finder_for_path(path) + if finder is None: + import pdb; pdb.set_trace () + if env and env._cache_enabled and path in env._cache.path: + metadata = env._cache.path[path].metadata + elif metadata is None: + r = finder.find(METADATA_FILENAME) + # Temporary - for Wheel 0.23 support + if r is None: + r = finder.find(WHEEL_METADATA_FILENAME) + # Temporary - for legacy support + if r is None: + r = finder.find('METADATA') + if r is None: + raise ValueError('no %s found in %s' % (METADATA_FILENAME, + path)) + with contextlib.closing(r.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + + super(InstalledDistribution, self).__init__(metadata, path, env) + + if env and env._cache_enabled: + env._cache.add(self) + + try: + r = finder.find('REQUESTED') + except AttributeError: + import pdb; pdb.set_trace () + self.requested = r is not None + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def _get_records(self): + """ + Get the list of installed files for the distribution + :return: A list of tuples of path, hash and size. Note that hash and + size might be ``None`` for some entries. The path is exactly + as stored in the file (which is as in PEP 376). 
+ """ + results = [] + r = self.get_distinfo_resource('RECORD') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as record_reader: + # Base location is parent dir of .dist-info dir + #base_location = os.path.dirname(self.path) + #base_location = os.path.abspath(base_location) + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + #if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) + results.append((path, checksum, size)) + return results + + @cached_property + def exports(self): + """ + Return the information exported by this distribution. + :return: A dictionary of exports, mapping an export category to a dict + of :class:`ExportEntry` instances describing the individual + export entries, and keyed by name. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + result = self.read_exports() + return result + + def read_exports(self): + """ + Read exports data from a file in .ini format. + + :return: A dictionary of exports, mapping an export category to a list + of :class:`ExportEntry` instances describing the individual + export entries. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + with contextlib.closing(r.as_stream()) as stream: + result = read_exports(stream) + return result + + def write_exports(self, exports): + """ + Write a dictionary of exports to a file in .ini format. + :param exports: A dictionary of exports, mapping an export category to + a list of :class:`ExportEntry` instances describing the + individual export entries. + """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. + + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. 
+ """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix and + path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append((path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. + :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. 
+ """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. + + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? + distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. + + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. 
It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. + """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + if line.startswith('['): + logger.warning('Unexpected line: quitting requirement scan: %r', + line) + break + r = parse_requirement(line) + if not r: + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. + """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + + if path.endswith('.egg'): + if os.path.isdir(path): + meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO') + metadata = Metadata(path=meta_path, scheme='legacy') + req_path = os.path.join(path, 'EGG-INFO', 'requires.txt') + requires = parse_requires_path(req_path) + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO( + zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + metadata = Metadata(fileobj=fileobj, scheme='legacy') + try: + data = zipf.get_data('EGG-INFO/requires.txt') + requires = parse_requires_data(data.decode('utf-8')) + except IOError: + requires = None + elif path.endswith('.egg-info'): + if os.path.isdir(path): + req_path = os.path.join(path, 'requires.txt') + requires = parse_requires_path(req_path) + path = os.path.join(path, 'PKG-INFO') + metadata = Metadata(path=path, scheme='legacy') + else: + raise DistlibException('path must end with .egg-info or .egg, ' + 'got %r' % path) + + if requires: + metadata.add_requirements(requires) + return metadata + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. 
Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + for path, _, _ in self.list_installed_files(): + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + return mismatches + + def list_installed_files(self): + """ + Iterates over the ``installed-files.txt`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + #otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. + :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. 
+ """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + #self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. + + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. + + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if not label is None: + f.write('"%s" -> "%s" [label="%s"]\n' % + (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. 
+ """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. + break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', + ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires | + dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. 
+ + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = [] # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop()[0] + req.append(d) + for pred in graph.adjacency_list[d]: + if pred not in req: + todo.append(pred) + + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. + """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Placeholder for summary' + return Distribution(md) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/index.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/index.py new file mode 100644 index 0000000..6803dd2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/index.py @@ -0,0 +1,515 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import cached_property, zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_REALM = 'pypi' + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. + """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + self.rpc_proxy = None + with open(os.devnull, 'w') as sink: + # Use gpg by default rather than gpg2, as gpg2 insists on + # prompting for passwords + for s in ('gpg', 'gpg2'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from distutils.core import Distribution + from distutils.config import PyPIRCCommand + d = Distribution() + return PyPIRCCommand(d) + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils, getting + PyPI to do the actual work. This populates ``username``, ``password``, + ``realm`` and ``url`` attributes from the configuration. 
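+
+        A hypothetical usage sketch (the values come from the user's
+        ``.pypirc`` file, so the attributes may be ``None`` if nothing is
+        configured there)::
+
+            index = PackageIndex()    # __init__ already calls this method
+            print(index.username, index.realm, index.url)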
+ """ + # get distutils to do the work + c = self._get_pypirc_command() + c.repository = self.url + cfg = c._read_pypirc() + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. You must have set ``username`` and + ``password`` attributes before calling this method. + + Again, distutils is used to do the actual work. + """ + self.check_credentials() + # get distutils to do the work + c = self._get_pypirc_command() + c._store_pypirc(self.username, self.password) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. + """ + if self.username is None or self.password is None: + raise DistlibException('username and password must be set') + pm = HTTPPasswordMgr() + _, netloc, _, _, _, _ = urlparse(self.url) + pm.add_password(self.realm, netloc, self.username, self.password) + self.password_handler = HTTPBasicAuthHandler(pm) + + def register(self, metadata): + """ + Register a distribution on PyPI, using the provided metadata. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the distribution to be + registered. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + metadata.validate() + d = metadata.todict() + d[':action'] = 'verify' + request = self.encode_request(d.items(), []) + response = self.send_request(request) + d[':action'] = 'submit' + request = self.encode_request(d.items(), []) + return self.send_request(request) + + def _reader(self, name, stream, outbuf): + """ + Thread runner for reading lines of from a subprocess into a buffer. + + :param name: The logical name of the stream (used for logging only). + :param stream: The stream to read from. This will typically a pipe + connected to the output stream of a subprocess. + :param outbuf: The list to append the read lines to. + """ + while True: + s = stream.readline() + if not s: + break + s = s.decode('utf-8').rstrip() + outbuf.append(s) + logger.debug('%s: %s' % (name, s)) + stream.close() + + def get_sign_command(self, filename, signer, sign_password, + keystore=None): + """ + Return a suitable command for signing a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The signing command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + if sign_password is not None: + cmd.extend(['--batch', '--passphrase-fd', '0']) + td = tempfile.mkdtemp() + sf = os.path.join(td, os.path.basename(filename) + '.asc') + cmd.extend(['--detach-sign', '--armor', '--local-user', + signer, '--output', sf, filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd, sf + + def run_command(self, cmd, input_data=None): + """ + Run a command in a child process , passing it any input data specified. + + :param cmd: The command to run. 
+ :param input_data: If specified, this must be a byte string containing + data to be sent to the child process. + :return: A tuple consisting of the subprocess' exit code, a list of + lines read from the subprocess' ``stdout``, and a list of + lines read from the subprocess' ``stderr``. + """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
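+
+        An illustrative call, assuming ``md`` is a populated
+        :class:`Metadata` instance and ``'dist/foo-1.0.tar.gz'`` is a
+        hypothetical sdist built beforehand::
+
+            index = PackageIndex()
+            index.username = 'someuser'
+            index.password = 'not-a-real-password'
+            response = index.upload_file(md, 'dist/foo-1.0.tar.gz',
+                                         filetype='sdist',
+                                         pyversion='source')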
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protocol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error ' + 'code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. + with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): + if isinstance(terms, string_types): + terms = {'name': terms} + if self.rpc_proxy is None: + self.rpc_proxy = ServerProxy(self.url, timeout=3.0) + return self.rpc_proxy.search(terms, operator or 'and') diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/locators.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/locators.py new file mode 100644 index 0000000..14789ef --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/locators.py @@ -0,0 +1,1283 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, + queue, quote, unescape, string_types, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, + Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata +from .util import (cached_property, parse_credentials, ensure_slash, + split_filename, get_project_data, parse_requirement, + parse_name_and_version, ServerProxy, normalize_name) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile('^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.python.org/pypi' + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + return client.list_packages() + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. + def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. 
+ newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, + headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf',) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl',) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None + self.errors = queue.Queue() + + def get_errors(self): + """ + Return any errors which have occurred. + """ + result = [] + while not self.errors.empty(): # pragma: no cover + try: + e = self.errors.get(False) + result.append(e) + except self.errors.Empty: + continue + self.errors.task_done() + return result + + def clear_errors(self): + """ + Clear any errors which may have been logged. + """ + # Just get the errors and throw them away + self.get_errors() + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. 
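+
+        An illustrative lookup, assuming ``locator`` is an instance of a
+        concrete subclass (for the network-backed locators this performs
+        I/O; the project name is arbitrary)::
+
+            versions = locator.get_project('requests')
+            # skip the bookkeeping keys added alongside the version entries
+            available = [k for k in versions if k not in ('urls', 'digests')]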
+ """ + if self._cache is None: + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + self.clear_errors() + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme != 'https', 'pypi.python.org' in t.netloc, + is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). + + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. 
+ """ + def same_project(name1, name2): + return normalize_name(name1) == normalize_name(name2) + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): + logger.debug('%s: version hint in fragment: %r', + project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if is_compatible(wheel, self.wheel_tags): + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + 'python-version': ', '.join( + ['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception as e: # pragma: no cover + logger.warning('invalid path for wheel: %s', path) + elif path.endswith(self.downloadable_extensions): + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + #'packagetype': 'sdist', + } + if pyver: + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at keys of the form + 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. + """ + result = None + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. + + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. 
+ """ + result = None + r = parse_requirement(requirement) + if r is None: + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + else: + logger.debug('skipping pre-release ' + 'version %s of %s', k, matcher.name) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. + """ + def __init__(self, url, **kwargs): + super(PyPIJSONLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. 
+ """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + url = urljoin(self.base_url, '%s/json' % quote(name)) + try: + resp = self.opener.open(url) + data = resp.read().decode() # for now + d = json.loads(data) + md = Metadata(scheme=self.scheme) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') + dist = Distribution(md) + dist.locator = self + urls = d['urls'] + result[md.version] = dist + for info in d['urls']: + url = info['url'] + dist.download_urls.add(url) + dist.digests[url] = self._get_digest(info) + result['urls'].setdefault(md.version, set()).add(url) + result['digests'][url] = self._get_digest(info) + # Now get other releases + for version, infos in d['releases'].items(): + if version == md.version: + continue # already done + omd = Metadata(scheme=self.scheme) + omd.name = md.name + omd.version = version + odist = Distribution(omd) + odist.locator = self + result[version] = odist + for info in infos: + url = info['url'] + odist.download_urls.add(url) + odist.digests[url] = self._get_digest(info) + result['urls'].setdefault(version, set()).add(url) + result['digests'][url] = self._get_digest(info) +# for info in urls: +# md.source_url = info['url'] +# dist.digest = self._get_digest(info) +# dist.locator = self +# for info in urls: +# url = info['url'] +# result['urls'].setdefault(md.version, set()).add(url) +# result['digests'][url] = self._get_digest(info) + except Exception as e: + self.errors.put(text_type(e)) + logger.exception('JSON fetch failed: %s', e) + return result + + +class Page(object): + """ + This class represents a scraped HTML page. + """ + # The following slightly hairy-looking regex just looks for the contents of + # an anchor link, which has an attribute "href" either immediately preceded + # or immediately followed by a "rel" attribute. The attribute values can be + # declared with double quotes, single quotes or no quotes - which leads to + # the length of the expression. + _href = re.compile(""" +(rel\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*))\s+)? +href\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*)) +(\s+rel\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*)))? +""", re.I | re.S | re.X) + _base = re.compile(r"""]+)""", re.I | re.S) + + def __init__(self, data, url): + """ + Initialise an instance with the Unicode page contents and the URL they + came from. + """ + self.data = data + self.base_url = self.url = url + m = self._base.search(self.data) + if m: + self.base_url = m.group(1) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + @cached_property + def links(self): + """ + Return the URLs of all the links on a page together with information + about their "rel" attribute, for determining which ones to treat as + downloads and which ones to queue for further scraping. + """ + def clean(url): + "Tidy up an URL." 
+ scheme, netloc, path, params, query, frag = urlparse(url) + return urlunparse((scheme, netloc, quote(path), + params, query, frag)) + + result = set() + for match in self._href.finditer(self.data): + d = match.groupdict('') + rel = (d['rel1'] or d['rel2'] or d['rel3'] or + d['rel4'] or d['rel5'] or d['rel6']) + url = d['url1'] or d['url2'] or d['url3'] + url = urljoin(self.base_url, url) + url = unescape(url) + url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) + result.add((url, rel)) + # We sort the result, hoping to bring the most recent versions + # to the front + result = sorted(result, key=lambda t: t[0], reverse=True) + return result + + +class SimpleScrapingLocator(Locator): + """ + A locator which scrapes HTML pages to locate downloads for a distribution. + This runs multiple threads to do the I/O; performance is at least as good + as pip's PackageFinder, which works in an analogous fashion. + """ + + # These are used to deal with various Content-Encoding schemes. + decoders = { + 'deflate': zlib.decompress, + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(d)).read(), + 'none': lambda b: b, + } + + def __init__(self, url, timeout=None, num_workers=10, **kwargs): + """ + Initialise an instance. + :param url: The root URL to use for scraping. + :param timeout: The timeout, in seconds, to be applied to requests. + This defaults to ``None`` (no timeout specified). + :param num_workers: The number of worker threads you want to do I/O, + This defaults to 10. + :param kwargs: Passed to the superclass. + """ + super(SimpleScrapingLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + self.timeout = timeout + self._page_cache = {} + self._seen = set() + self._to_fetch = queue.Queue() + self._bad_hosts = set() + self.skip_externals = False + self.num_workers = num_workers + self._lock = threading.RLock() + # See issue #45: we need to be resilient when the locator is used + # in a thread, e.g. with concurrent.futures. We can't use self._lock + # as it is for coordinating our internal threads - the ones created + # in _prepare_threads. + self._gplock = threading.RLock() + + def _prepare_threads(self): + """ + Threads are created only when get_project is called, and terminate + before it returns. They are there primarily to parallelise I/O (i.e. + fetching web pages). + """ + self._threads = [] + for i in range(self.num_workers): + t = threading.Thread(target=self._fetch) + t.setDaemon(True) + t.start() + self._threads.append(t) + + def _wait_threads(self): + """ + Tell all the threads to terminate (by sending a sentinel value) and + wait for them to do so. + """ + # Note that you need two loops, since you can't say which + # thread will get each sentinel + for t in self._threads: + self._to_fetch.put(None) # sentinel + for t in self._threads: + t.join() + self._threads = [] + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + with self._gplock: + self.result = result + self.project_name = name + url = urljoin(self.base_url, '%s/' % quote(name)) + self._seen.clear() + self._page_cache.clear() + self._prepare_threads() + try: + logger.debug('Queueing %s', url) + self._to_fetch.put(url) + self._to_fetch.join() + finally: + self._wait_threads() + del self.result + return result + + platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' + r'win(32|-amd64)|macosx-?\d+)\b', re.I) + + def _is_platform_dependent(self, url): + """ + Does an URL refer to a platform-specific download? 
+ """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. + """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, + referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + self._seen.add(link) + if (not self._process_download(link) and + self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + except Exception as e: # pragma: no cover + self.errors.put(text_type(e)) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + #logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
+ """ + # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api + scheme, netloc, path, _, _, _ = urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + url = urljoin(ensure_slash(url), 'index.html') + + if url in self._page_cache: + result = self._page_cache[url] + logger.debug('Returning %s from cache: %s', url, result) + else: + host = netloc.split(':', 1)[0] + result = None + if host in self._bad_hosts: + logger.debug('Skipping %s due to bad host %s', url, host) + else: + req = Request(url, headers={'Accept-encoding': 'identity'}) + try: + logger.debug('Fetching %s', url) + resp = self.opener.open(req, timeout=self.timeout) + logger.debug('Fetched %s', url) + headers = resp.info() + content_type = headers.get('Content-Type', '') + if HTML_CONTENT_TYPE.match(content_type): + final_url = resp.geturl() + data = resp.read() + encoding = headers.get('Content-Encoding') + if encoding: + decoder = self.decoders[encoding] # fail if not found + data = decoder(data) + encoding = 'utf-8' + m = CHARSET.search(content_type) + if m: + encoding = m.group(1) + try: + data = data.decode(encoding) + except UnicodeError: # pragma: no cover + data = data.decode('latin-1') # fallback + result = Page(data, final_url) + self._page_cache[final_url] = result + except HTTPError as e: + if e.code != 404: + logger.exception('Fetch failed: %s: %s', url, e) + except URLError as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + with self._lock: + self._bad_hosts.add(host) + except Exception as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + finally: + self._page_cache[url] = result # even if None (failure) + return result + + _distname_re = re.compile(']*>([^<]+)<') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + page = self.get_page(self.base_url) + if not page: + raise DistlibException('Unable to get %s' % self.base_url) + for match in self._distname_re.finditer(page.data): + result.add(match.group(1)) + return result + +class DirectoryLocator(Locator): + """ + This class locates distributions in a directory tree. + """ + + def __init__(self, path, **kwargs): + """ + Initialise an instance. + :param path: The root of the directory tree to search. + :param kwargs: Passed to the superclass constructor, + except for: + * recursive - if True (the default), subdirectories are + recursed into. If False, only the top-level directory + is searched, + """ + self.recursive = kwargs.pop('recursive', True) + super(DirectoryLocator, self).__init__(**kwargs) + path = os.path.abspath(path) + if not os.path.isdir(path): # pragma: no cover + raise DistlibException('Not a directory: %r' % path) + self.base_dir = path + + def should_include(self, filename, parent): + """ + Should a filename be considered as a candidate for a distribution + archive? As well as the filename, the directory which contains it + is provided, though not used by the current implementation. 
+ """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], info['version'], + summary=data.get('summary', + 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. + + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': {dist.version: set([dist.source_url])}, + 'digests': {dist.version: set([None])} + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. If True, + the results from all locators are merged (this can be + slow). 
+ """ + self.merge = kwargs.pop('merge', False) + self.locators = locators + super(AggregatingLocator, self).__init__(**kwargs) + + def clear_cache(self): + super(AggregatingLocator, self).clear_cache() + for locator in self.locators: + locator.clear_cache() + + def _set_scheme(self, value): + self._scheme = value + for locator in self.locators: + locator.scheme = value + + scheme = property(Locator.scheme.fget, _set_scheme) + + def _get_project(self, name): + result = {} + for locator in self.locators: + d = locator.get_project(name) + if d: + if self.merge: + files = result.get('urls', {}) + digests = result.get('digests', {}) + # next line could overwrite result['urls'], result['digests'] + result.update(d) + df = result.get('urls') + if files and df: + for k, v in files.items(): + if k in df: + df[k] |= v + else: + df[k] = v + dd = result.get('digests') + if digests and dd: + dd.update(digests) + else: + # See issue #18. If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for locator in self.locators: + try: + result |= locator.get_distribution_names() + except NotImplementedError: + pass + return result + + +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 426 / PEP 440. +default_locator = AggregatingLocator( + JSONLocator(), + SimpleScrapingLocator('https://pypi.python.org/simple/', + timeout=3.0), + scheme='legacy') + +locate = default_locator.locate + +NAME_VERSION_RE = re.compile(r'(?P[\w-]+)\s*' + r'\(\s*(==\s*)?(?P[^)]+)\)$') + +class DependencyFinder(object): + """ + Locate dependencies for distributions. + """ + + def __init__(self, locator=None): + """ + Initialise an instance, using the specified locator + to locate distributions. + """ + self.locator = locator or default_locator + self.scheme = get_scheme(self.locator.scheme) + + def add_distribution(self, dist): + """ + Add a distribution to the finder. This will update internal information + about who provides what. + :param dist: The distribution to add. + """ + logger.debug('adding distribution %s', dist) + name = dist.key + self.dists_by_name[name] = dist + self.dists[(name, dist.version)] = dist + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + self.provided.setdefault(name, set()).add((version, dist)) + + def remove_distribution(self, dist): + """ + Remove a distribution from the finder. This will update internal + information about who provides what. + :param dist: The distribution to remove. 
+ """ + logger.debug('removing distribution %s', dist) + name = dist.key + del self.dists_by_name[name] + del self.dists[(name, dist.version)] + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Remove from provided: %s, %s, %s', name, version, dist) + s = self.provided[name] + s.remove((version, dist)) + if not s: + del self.provided[name] + + def get_matcher(self, reqt): + """ + Get a version matcher for a requirement. + :param reqt: The requirement + :type reqt: str + :return: A version matcher (an instance of + :class:`distlib.version.Matcher`). + """ + try: + matcher = self.scheme.matcher(reqt) + except UnsupportedVersionError: # pragma: no cover + # XXX compat-mode if cannot read the version + name = reqt.split()[0] + matcher = self.scheme.matcher(name) + return matcher + + def find_providers(self, reqt): + """ + Find the distributions which can fulfill a requirement. + + :param reqt: The requirement. + :type reqt: str + :return: A set of distribution which can fulfill the requirement. + """ + matcher = self.get_matcher(reqt) + name = matcher.key # case-insensitive + result = set() + provided = self.provided + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + result.add(provider) + break + return result + + def try_to_replace(self, provider, other, problems): + """ + Attempt to replace one provider with another. This is typically used + when resolving dependencies from multiple sources, e.g. A requires + (B >= 1.0) while C requires (B >= 1.1). + + For successful replacement, ``provider`` must meet all the requirements + which ``other`` fulfills. + + :param provider: The provider we are trying to replace with. + :param other: The provider we're trying to replace. + :param problems: If False is returned, this will contain what + problems prevented replacement. This is currently + a tuple of the literal string 'cantreplace', + ``provider``, ``other`` and the set of requirements + that ``provider`` couldn't fulfill. + :return: True if we can replace ``other`` with ``provider``, else + False. + """ + rlist = self.reqts[other] + unmatched = set() + for s in rlist: + matcher = self.get_matcher(s) + if not matcher.match(provider.version): + unmatched.add(s) + if unmatched: + # can't replace other with provider + problems.add(('cantreplace', provider, other, + frozenset(unmatched))) + result = False + else: + # can replace other with provider + self.remove_distribution(other) + del self.reqts[other] + for s in rlist: + self.reqts.setdefault(provider, set()).add(s) + self.add_distribution(provider) + result = True + return result + + def find(self, requirement, meta_extras=None, prereleases=False): + """ + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. + :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. + + Return a set of :class:`Distribution` instances and a set of + problems. 
+ + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, + prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + #import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. + if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', + provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', + dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/manifest.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/manifest.py new file mode 100644 index 0000000..9f03364 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/manifest.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2013 Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Class representing the list of files in a distribution. 
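find() is the workhorse of DependencyFinder: it walks the requirement graph and separates run-time from build-time dependencies. A hedged usage sketch (again assuming the vendored distlib is importable and PyPI is reachable; the requirement string is only an example):

    from pip._vendor.distlib.locators import DependencyFinder

    finder = DependencyFinder()               # uses default_locator
    dists, problems = finder.find('flask (== 0.12)')
    for d in dists:
        print(d.name_and_version, 'build-time only:', d.build_time_dependency)
    for p in problems:
        print(p)                              # e.g. ('unsatisfied', '...')
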
+ +Equivalent to distutils.filelist, but fixes some problems. +""" +import fnmatch +import logging +import os +import re +import sys + +from . import DistlibException +from .compat import fsdecode +from .util import convert_path + + +__all__ = ['Manifest'] + +logger = logging.getLogger(__name__) + +# a \ followed by some spaces + EOL +_COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M) +_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) + +# +# Due to the different results returned by fnmatch.translate, we need +# to do slightly different processing for Python 2.7 and 3.2 ... this needed +# to be brought in for Python 3.6 onwards. +# +_PYTHON_VERSION = sys.version_info[:2] + +class Manifest(object): + """A list of files built by on exploring the filesystem and filtered by + applying various patterns to what we find there. + """ + + def __init__(self, base=None): + """ + Initialise an instance. + + :param base: The base directory to explore under. + """ + self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) + self.prefix = self.base + os.sep + self.allfiles = None + self.files = set() + + # + # Public API + # + + def findall(self): + """Find all files under the base and set ``allfiles`` to the absolute + pathnames of files found. + """ + from stat import S_ISREG, S_ISDIR, S_ISLNK + + self.allfiles = allfiles = [] + root = self.base + stack = [root] + pop = stack.pop + push = stack.append + + while stack: + root = pop() + names = os.listdir(root) + + for name in names: + fullname = os.path.join(root, name) + + # Avoid excess stat calls -- just one will do, thank you! + stat = os.stat(fullname) + mode = stat.st_mode + if S_ISREG(mode): + allfiles.append(fsdecode(fullname)) + elif S_ISDIR(mode) and not S_ISLNK(mode): + push(fullname) + + def add(self, item): + """ + Add a file to the manifest. + + :param item: The pathname to add. This can be relative to the base. + """ + if not item.startswith(self.prefix): + item = os.path.join(self.base, item) + self.files.add(os.path.normpath(item)) + + def add_many(self, items): + """ + Add a list of files to the manifest. + + :param items: The pathnames to add. These can be relative to the base. + """ + for item in items: + self.add(item) + + def sorted(self, wantdirs=False): + """ + Return sorted files in directory order + """ + + def add_dir(dirs, d): + dirs.add(d) + logger.debug('add_dir added %s', d) + if d != self.base: + parent, _ = os.path.split(d) + assert parent not in ('', '/') + add_dir(dirs, parent) + + result = set(self.files) # make a copy! + if wantdirs: + dirs = set() + for f in result: + add_dir(dirs, os.path.dirname(f)) + result |= dirs + return [os.path.join(*path_tuple) for path_tuple in + sorted(os.path.split(path) for path in result)] + + def clear(self): + """Clear all collected files.""" + self.files = set() + self.allfiles = [] + + def process_directive(self, directive): + """ + Process a directive which either adds some files from ``allfiles`` to + ``files``, or removes some files from ``files``. + + :param directive: The directive to process. This should be in a format + compatible with distutils ``MANIFEST.in`` files: + + http://docs.python.org/distutils/sourcedist.html#commands + """ + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dirpattern). 
+ action, patterns, thedir, dirpattern = self._parse_directive(directive) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. + if action == 'include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=True): + logger.warning('no files found matching %r', pattern) + + elif action == 'exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, anchor=True) + #if not found: + # logger.warning('no previously-included files ' + # 'found matching %r', pattern) + + elif action == 'global-include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=False): + logger.warning('no files found matching %r ' + 'anywhere in distribution', pattern) + + elif action == 'global-exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, anchor=False) + #if not found: + # logger.warning('no previously-included files ' + # 'matching %r found anywhere in ' + # 'distribution', pattern) + + elif action == 'recursive-include': + for pattern in patterns: + if not self._include_pattern(pattern, prefix=thedir): + logger.warning('no files found matching %r ' + 'under directory %r', pattern, thedir) + + elif action == 'recursive-exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, prefix=thedir) + #if not found: + # logger.warning('no previously-included files ' + # 'matching %r found under directory %r', + # pattern, thedir) + + elif action == 'graft': + if not self._include_pattern(None, prefix=dirpattern): + logger.warning('no directories found matching %r', + dirpattern) + + elif action == 'prune': + if not self._exclude_pattern(None, prefix=dirpattern): + logger.warning('no previously-included directories found ' + 'matching %r', dirpattern) + else: # pragma: no cover + # This should never happen, as it should be caught in + # _parse_template_line + raise DistlibException( + 'invalid action %r' % action) + + # + # Private API + # + + def _parse_directive(self, directive): + """ + Validate a directive. + :param directive: The directive to validate. + :return: A tuple of action, patterns, thedir, dir_patterns + """ + words = directive.split() + if len(words) == 1 and words[0] not in ('include', 'exclude', + 'global-include', + 'global-exclude', + 'recursive-include', + 'recursive-exclude', + 'graft', 'prune'): + # no action given, let's use the default 'include' + words.insert(0, 'include') + + action = words[0] + patterns = thedir = dir_pattern = None + + if action in ('include', 'exclude', + 'global-include', 'global-exclude'): + if len(words) < 2: + raise DistlibException( + '%r expects ...' % action) + + patterns = [convert_path(word) for word in words[1:]] + + elif action in ('recursive-include', 'recursive-exclude'): + if len(words) < 3: + raise DistlibException( + '%r expects ...' % action) + + thedir = convert_path(words[1]) + patterns = [convert_path(word) for word in words[2:]] + + elif action in ('graft', 'prune'): + if len(words) != 2: + raise DistlibException( + '%r expects a single ' % action) + + dir_pattern = convert_path(words[1]) + + else: + raise DistlibException('unknown action %r' % action) + + return action, patterns, thedir, dir_pattern + + def _include_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Select strings (presumably filenames) from 'self.files' that + match 'pattern', a Unix-style wildcard (glob) pattern. 
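process_directive() accepts the same directive vocabulary as a distutils MANIFEST.in file. A short sketch of driving the class directly (the package and file names are hypothetical; unmatched patterns only produce warnings):

    import os
    from pip._vendor.distlib.manifest import Manifest

    m = Manifest(base=os.getcwd())
    m.process_directive('include README.rst setup.py')
    m.process_directive('recursive-include mypkg *.py')
    m.process_directive('global-exclude *.pyc')
    print(m.sorted())                         # matched files in directory order
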
+ + Patterns are not quite the same as implemented by the 'fnmatch' + module: '*' and '?' match non-special characters, where "special" + is platform-dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found. + """ + # XXX docstring lying about what the special chars are? + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.files.add(name) + found = True + return found + + def _exclude_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. + + Other parameters are the same as for 'include_pattern()', above. + The list 'self.files' is modified in place. Return True if files are + found. + + This API is public to allow e.g. exclusion of SCM subdirs, e.g. when + packaging source distributions + """ + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + for f in list(self.files): + if pattern_re.search(f): + self.files.remove(f) + found = True + return found + + def _translate_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Translate a shell-like wildcard pattern to a compiled regular + expression. + + Return the compiled regex. If 'is_regex' true, + then 'pattern' is directly compiled to a regex (if it's a string) + or just returned as-is (assumes it's a regex object). 
+ """ + if is_regex: + if isinstance(pattern, str): + return re.compile(pattern) + else: + return pattern + + if _PYTHON_VERSION > (3, 2): + # ditch start and end characters + start, _, end = self._glob_to_re('_').partition('_') + + if pattern: + pattern_re = self._glob_to_re(pattern) + if _PYTHON_VERSION > (3, 2): + assert pattern_re.startswith(start) and pattern_re.endswith(end) + else: + pattern_re = '' + + base = re.escape(os.path.join(self.base, '')) + if prefix is not None: + # ditch end of pattern character + if _PYTHON_VERSION <= (3, 2): + empty_pattern = self._glob_to_re('') + prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] + else: + prefix_re = self._glob_to_re(prefix) + assert prefix_re.startswith(start) and prefix_re.endswith(end) + prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] + sep = os.sep + if os.sep == '\\': + sep = r'\\' + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + sep.join((prefix_re, + '.*' + pattern_re)) + else: + pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] + pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, + pattern_re, end) + else: # no prefix -- respect anchor flag + if anchor: + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + pattern_re + else: + pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) + + return re.compile(pattern_re) + + def _glob_to_re(self, pattern): + """Translate a shell-like glob pattern to a regular expression. + + Return a string containing the regex. Differs from + 'fnmatch.translate()' in that '*' does not match "special characters" + (which are platform-specific). + """ + pattern_re = fnmatch.translate(pattern) + + # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which + # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, + # and by extension they shouldn't match such "special characters" under + # any OS. So change all non-escaped dots in the RE to match any + # character except the special characters (currently: just os.sep). + sep = os.sep + if os.sep == '\\': + # we're using a regex to manipulate a regex, so we need + # to escape the backslash twice + sep = r'\\\\' + escaped = r'\1[^%s]' % sep + pattern_re = re.sub(r'((? y, + 'gte': lambda x, y: x >= y, + 'in': lambda x, y: x in y, + 'lt': lambda x, y: x < y, + 'lte': lambda x, y: x <= y, + 'not': lambda x: not x, + 'noteq': lambda x, y: x != y, + 'notin': lambda x, y: x not in y, + } + + allowed_values = { + 'sys_platform': sys.platform, + 'python_version': '%s.%s' % sys.version_info[:2], + # parsing sys.platform is not reliable, but there is no other + # way to get e.g. 2.7.2+, and the PEP is defined with sys.version + 'python_full_version': sys.version.split(' ', 1)[0], + 'os_name': os.name, + 'platform_in_venv': str(in_venv()), + 'platform_release': platform.release(), + 'platform_version': platform.version(), + 'platform_machine': platform.machine(), + 'platform_python_implementation': python_implementation(), + } + + def __init__(self, context=None): + """ + Initialise an instance. + + :param context: If specified, names are looked up in this mapping. + """ + self.context = context or {} + self.source = None + + def get_fragment(self, offset): + """ + Get the part of the source which is causing a problem. + """ + fragment_len = 10 + s = '%r' % (self.source[offset:offset + fragment_len]) + if offset + fragment_len < len(self.source): + s += '...' + return s + + def get_handler(self, node_type): + """ + Get a handler for the specified AST node type. 
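The point of _glob_to_re() over plain fnmatch.translate() is that '*' and '?' stop at the path separator. A quick comparison, shown against a POSIX separator only; it pokes at a private helper purely for illustration:

    import fnmatch
    from pip._vendor.distlib.manifest import Manifest

    m = Manifest()
    print(fnmatch.translate('*.py'))   # the '.*' produced here can cross '/'
    print(m._glob_to_re('*.py'))       # unescaped '.' is rewritten to '[^/]' instead
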
+ """ + return getattr(self, 'do_%s' % node_type, None) + + def evaluate(self, node, filename=None): + """ + Evaluate a source string or node, using ``filename`` when + displaying errors. + """ + if isinstance(node, string_types): + self.source = node + kwargs = {'mode': 'eval'} + if filename: + kwargs['filename'] = filename + try: + node = ast.parse(node, **kwargs) + except SyntaxError as e: + s = self.get_fragment(e.offset) + raise SyntaxError('syntax error %s' % s) + node_type = node.__class__.__name__.lower() + handler = self.get_handler(node_type) + if handler is None: + if self.source is None: + s = '(source not available)' + else: + s = self.get_fragment(node.col_offset) + raise SyntaxError("don't know how to evaluate %r %s" % ( + node_type, s)) + return handler(node) + + def get_attr_key(self, node): + assert isinstance(node, ast.Attribute), 'attribute node expected' + return '%s.%s' % (node.value.id, node.attr) + + def do_attribute(self, node): + if not isinstance(node.value, ast.Name): + valid = False + else: + key = self.get_attr_key(node) + valid = key in self.context or key in self.allowed_values + if not valid: + raise SyntaxError('invalid expression: %s' % key) + if key in self.context: + result = self.context[key] + else: + result = self.allowed_values[key] + return result + + def do_boolop(self, node): + result = self.evaluate(node.values[0]) + is_or = node.op.__class__ is ast.Or + is_and = node.op.__class__ is ast.And + assert is_or or is_and + if (is_and and result) or (is_or and not result): + for n in node.values[1:]: + result = self.evaluate(n) + if (is_or and result) or (is_and and not result): + break + return result + + def do_compare(self, node): + def sanity_check(lhsnode, rhsnode): + valid = True + if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str): + valid = False + #elif (isinstance(lhsnode, ast.Attribute) + # and isinstance(rhsnode, ast.Attribute)): + # klhs = self.get_attr_key(lhsnode) + # krhs = self.get_attr_key(rhsnode) + # valid = klhs != krhs + if not valid: + s = self.get_fragment(node.col_offset) + raise SyntaxError('Invalid comparison: %s' % s) + + lhsnode = node.left + lhs = self.evaluate(lhsnode) + result = True + for op, rhsnode in zip(node.ops, node.comparators): + sanity_check(lhsnode, rhsnode) + op = op.__class__.__name__.lower() + if op not in self.operators: + raise SyntaxError('unsupported operation: %r' % op) + rhs = self.evaluate(rhsnode) + result = self.operators[op](lhs, rhs) + if not result: + break + lhs = rhs + lhsnode = rhsnode + return result + + def do_expression(self, node): + return self.evaluate(node.body) + + def do_name(self, node): + valid = False + if node.id in self.context: + valid = True + result = self.context[node.id] + elif node.id in self.allowed_values: + valid = True + result = self.allowed_values[node.id] + if not valid: + raise SyntaxError('invalid expression: %s' % node.id) + return result + + def do_str(self, node): + return node.s + + +def interpret(marker, execution_context=None): + """ + Interpret a marker and return a result depending on environment. + + :param marker: The marker to interpret. + :type marker: str + :param execution_context: The context used for name lookup. 
+ :type execution_context: mapping + """ + return Evaluator(execution_context).evaluate(marker.strip()) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/metadata.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/metadata.py new file mode 100644 index 0000000..75bfd68 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/metadata.py @@ -0,0 +1,1068 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) 
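interpret() is the public face of the evaluator above: markers are evaluated against allowed_values unless an explicit context is supplied, which is how "extra == '...'" markers get resolved elsewhere in distlib. A small usage sketch:

    from pip._vendor.distlib.markers import interpret

    print(interpret("python_version >= '2.7'"))             # compared against allowed_values
    print(interpret("sys_platform == 'win32'"))
    print(interpret("extra == 'test'", {'extra': 'test'}))  # True; 'extra' comes from the context
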
+_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version == '2.0': + return _426_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + for marker in markers: + if marker in keys: + return True + return False + + keys = [] + for key, value in fields.items(): + if value in ([], 'UNKNOWN', None): + continue + keys.append(key) + + possible_versions = ['1.0', '1.1', '1.2', '2.0'] + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + if key not in _426_FIELDS and '2.0' in possible_versions: + possible_versions.remove('2.0') + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! + elif len(possible_versions) == 0: + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_0) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields') + + # we have the choice, 1.0, or 1.2, or 2.0 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.0 adds more features and is very new + if not is_1_1 and not is_1_2 and not is_2_0: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + + return '2.0' + +_ATTR2FIELD = { + 'metadata_version': 'Metadata-Version', + 'name': 'Name', + 'version': 'Version', + 'platform': 'Platform', + 'supported_platform': 'Supported-Platform', + 'summary': 'Summary', + 'description': 'Description', + 'keywords': 'Keywords', + 'home_page': 'Home-page', + 'author': 'Author', + 'author_email': 'Author-email', + 'maintainer': 'Maintainer', + 'maintainer_email': 'Maintainer-email', + 'license': 'License', + 'classifier': 'Classifier', + 'download_url': 'Download-URL', + 'obsoletes_dist': 'Obsoletes-Dist', + 'provides_dist': 'Provides-Dist', + 'requires_dist': 'Requires-Dist', + 'setup_requires_dist': 'Setup-Requires-Dist', + 'requires_python': 'Requires-Python', + 'requires_external': 'Requires-External', + 'requires': 'Requires', + 'provides': 'Provides', + 'obsoletes': 'Obsoletes', + 'project_url': 'Project-URL', + 'private_version': 'Private-Version', + 'obsoleted_by': 'Obsoleted-By', + 'extension': 'Extension', + 'provides_extra': 'Provides-Extra', +} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 
'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. 
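_get_name_and_version() produces the "name-version" stem used for filenames. A tiny illustration of the escaping rule described in its docstring (the helper is module-private, so this is only a reading aid; the inputs are made up):

    from pip._vendor.distlib.metadata import _get_name_and_version

    print(_get_name_and_version('my pkg', '1.0 beta'))        # 'my pkg-1.0 beta'
    print(_get_name_and_version('my pkg', '1.0 beta', True))  # 'my-pkg-1.0.beta'
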
+ + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
+ """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + "'%s': '%s' is not valid (field '%s')", + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
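Putting update(), set() and get() together: a LegacyMetadata instance can be built from a plain mapping, with the metadata version auto-detected by _best_version(). A minimal sketch (the field values are examples):

    import sys
    from pip._vendor.distlib.metadata import LegacyMetadata

    md = LegacyMetadata(mapping={'name': 'demo', 'version': '0.1',
                                 'summary': 'An example package'})
    print(md['Metadata-Version'])             # '1.1' -- the preferred fallback version
    print(md.check())                         # (missing, warnings)
    md.write_file(sys.stdout, skip_unknown=True)
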
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + """ + self.set_metadata_version() + + mapping_1_0 = ( + ('metadata_version', 'Metadata-Version'), + ('name', 'Name'), + ('version', 'Version'), + ('summary', 'Summary'), + ('home_page', 'Home-page'), + ('author', 'Author'), + ('author_email', 'Author-email'), + ('license', 'License'), + ('description', 'Description'), + ('keywords', 'Keywords'), + ('platform', 'Platform'), + ('classifiers', 'Classifier'), + ('download_url', 'Download-URL'), + ) + + data = {} + for key, field_name in mapping_1_0: + if not skip_missing or field_name in self._fields: + data[key] = self[field_name] + + if self['Metadata-Version'] == '1.2': + mapping_1_2 = ( + ('requires_dist', 'Requires-Dist'), + ('requires_python', 'Requires-Python'), + ('requires_external', 'Requires-External'), + ('provides_dist', 'Provides-Dist'), + ('obsoletes_dist', 'Obsoletes-Dist'), + ('project_url', 'Project-URL'), + ('maintainer', 'Maintainer'), + ('maintainer_email', 'Maintainer-email'), + ) + for key, field_name in mapping_1_2: + if not skip_missing or field_name in self._fields: + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + elif self['Metadata-Version'] == '1.1': + mapping_1_1 = ( + ('provides', 'Provides'), + ('requires', 'Requires'), + ('obsoletes', 'Obsoletes'), + ) + for key, field_name in mapping_1_1: + if not skip_missing or field_name in self._fields: + data[key] = self[field_name] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, 
self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.0 (JSON) + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. + """ + + METADATA_VERSION_MATCHER = re.compile('^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
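The Metadata wrapper prefers the 2.0 JSON form and only falls back to LegacyMetadata when the mapping or file cannot be validated as 2.0. A hedged construction sketch (field values are examples; attribute access is routed through __getattribute__, shown just below):

    from pip._vendor.distlib.metadata import Metadata

    md = Metadata(mapping={'metadata_version': '2.0',
                           'name': 'demo',
                           'version': '0.1',
                           'summary': 'An example package'})
    print(md.name, md.version)                # read straight out of the underlying dict
    print(md.dictionary['metadata_version'])  # '2.0'
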
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = 
self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + 'license': 'License', + 'summary': 'Summary', + 'description': 'Description', + 'classifiers': 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + for nk, ok in self.LEGACY_MAPPING.items(): + if nk in nmd: + result[ok] = nmd[nk] + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: other fields such as contacts + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/resources.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/resources.py new file mode 100644 index 0000000..f07cde2 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/resources.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2016 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import shutil +import sys +import types +import zipimport + +from . 
import DistlibException +from .util import cached_property, get_cache_base, path_to_cache_dir, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. + """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. 
+ """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if resource.is_container: + rname = resource.name + for name in resource.resources: + if not rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. 
+ """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def _is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + # In Python 3.6, _frozen_importlib -> _frozen_importlib_external + try: + import _frozen_importlib_external as _fi + except ImportError: + import _frozen_importlib as _fi + _finder_registry[_fi.SourceFileLoader] = ResourceFinder + _finder_registry[_fi.FileFinder] = ResourceFinder + del _fi +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. + """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. 
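finder() caches one ResourceFinder (or ZipResourceFinder) per package and is the normal way to reach resources. A short sketch; the package and resource names are hypothetical:

    from pip._vendor.distlib.resources import finder

    f = finder('mypackage')                   # must be an importable package
    res = f.find('templates/base.txt')
    if res is not None:
        print(res.size)
        with res.as_stream() as stream:
            data = stream.read()
    for r in f.iterator(''):                  # walk every resource in the package
        print(r.name, r.is_container)
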
+ :return: A :class:`ResourceFinder` instance for the path. + """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/scripts.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/scripts.py new file mode 100644 index 0000000..792fc2e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/scripts.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, + get_executable, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*- +if __name__ == '__main__': + import sys, re + + def _resolve(module, func): + __import__(module) + mod = sys.modules[module] + parts = func.split('.') + result = getattr(mod, parts.pop(0)) + for p in parts: + result = getattr(result, p) + return result + + try: + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + + func = _resolve('%(module)s', '%(func)s') + rc = func() # None interpreted as 0 + except Exception as e: # only supporting Python >= 2.6 + sys.stderr.write('%%s\\n' %% e) + rc = 1 + sys.exit(rc) +''' + + +def _enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. + """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, + dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. 
+ self.set_mode = (os.name == 'posix') or (os.name == 'java' and + os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or ( + os.name == 'java' and os._name == 'nt') + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + # Normalise case for Windows + executable = os.path.normcase(executable) + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = _enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. + executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp + and '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = b'#!' + executable + post_interp + b'\n' + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. 
+ if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict(module=entry.prefix, + func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + linesep = os.linesep.encode('utf-8') + if not use_launcher: + script_bytes = shebang + linesep + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + linesep + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + name = entry.name + scriptnames = set() + if '' in self.variants: + scriptnames.add(name) + if 'X' in self.variants: + scriptnames.add('%s%s' % (name, sys.version[0])) + if 'X.Y' in self.variants: + scriptnames.add('%s-%s' % (name, sys.version[:3])) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. 
+ try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s: %s is an empty file (skipping)', + self.get_command_name(), script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, + self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. + # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + name = '%s%s.exe' % (kind, bits) + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + distlib_package = __name__.rsplit('.', 1)[0] + result = finder(distlib_package).find(name).bytes + return result + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. + + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/util.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/util.py new file mode 100644 index 0000000..aadc874 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/util.py @@ -0,0 +1,1611 @@ +# +# Copyright (C) 2012-2016 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+#
+import codecs
+from collections import deque
+import contextlib
+import csv
+from glob import iglob as std_iglob
+import io
+import json
+import logging
+import os
+import py_compile
+import re
+import shutil
+import socket
+try:
+    import ssl
+except ImportError:  # pragma: no cover
+    ssl = None
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+
+try:
+    import threading
+except ImportError:  # pragma: no cover
+    import dummy_threading as threading
+import time
+
+from . import DistlibException
+from .compat import (string_types, text_type, shutil, raw_input, StringIO,
+                     cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
+                     splittype, HTTPHandler, BaseConfigurator, valid_ident,
+                     Container, configparser, URLError, ZipFile, fsdecode,
+                     unquote)
+
+logger = logging.getLogger(__name__)
+
+#
+# Requirement parsing code for name + optional constraints + optional extras
+#
+# e.g. 'foo >= 1.2, < 2.0 [bar, baz]'
+#
+# The regex can seem a bit hairy, so we build it up out of smaller pieces
+# which are manageable.
+#
+
+COMMA = r'\s*,\s*'
+COMMA_RE = re.compile(COMMA)
+
+IDENT = r'(\w|[.-])+'
+EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'
+VERSPEC = IDENT + r'\*?'
+
+RELOP = '([<>=!~]=)|[<>]'
+
+#
+# The first relop is optional - if absent, will be taken as '~='
+#
+BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +
+                    RELOP + r')\s*(' + VERSPEC + '))*')
+
+DIRECT_REF = '(from\s+(?P<diref>.*))'
+
+#
+# Either the bare constraints or the bare constraints in parentheses
+#
+CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF +
+               r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + '\s*)')
+
+EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'
+EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'
+REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
+               CONSTRAINTS + ')?$')
+REQUIREMENT_RE = re.compile(REQUIREMENT)
+
+#
+# Used to scan through the constraints
+#
+RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')'
+RELOP_IDENT_RE = re.compile(RELOP_IDENT)
+
+def parse_requirement(s):
+
+    def get_constraint(m):
+        d = m.groupdict()
+        return d['op'], d['vn']
+
+    result = None
+    m = REQUIREMENT_RE.match(s)
+    if m:
+        d = m.groupdict()
+        name = d['dn']
+        cons = d['c1'] or d['c2']
+        if not d['diref']:
+            url = None
+        else:
+            # direct reference
+            cons = None
+            url = d['diref'].strip()
+        if not cons:
+            cons = None
+            constr = ''
+            rs = d['dn']
+        else:
+            if cons[0] not in '<>!=':
+                cons = '~=' + cons
+            iterator = RELOP_IDENT_RE.finditer(cons)
+            cons = [get_constraint(m) for m in iterator]
+            rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
+        if not d['ex']:
+            extras = None
+        else:
+            extras = COMMA_RE.split(d['ex'])
+        result = Container(name=name, constraints=cons, extras=extras,
+                           requirement=rs, source=s, url=url)
+    return result
+
+
+def get_resources_dests(resources_root, rules):
+    """Find destinations for resources files"""
+
+    def get_rel_path(base, path):
+        # normalizes and returns a lstripped-/-separated path
+        base = base.replace(os.path.sep, '/')
+        path = path.replace(os.path.sep, '/')
+        assert path.startswith(base)
+        return path[len(base):].lstrip('/')
+
+
+    destinations = {}
+    for base, suffix, dest in rules:
+        prefix = os.path.join(resources_root, base)
+        for abs_base in iglob(prefix):
+            abs_glob = os.path.join(abs_base, suffix)
+            for abs_path in iglob(abs_glob):
+                resource_file = get_rel_path(resources_root, abs_path)
+                if dest is None:  # remove the entry if it was here
+                    destinations.pop(resource_file, None)
+
else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): +# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as +# changes to the stub launcher mean that sys.executable always points +# to the stub on macOS +# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' +# in os.environ): +# result = os.environ['__PYVENV_LAUNCHER__'] +# else: +# result = sys.executable +# return result + result = os.path.normcase(sys.executable) + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + #entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + def __init__(self, func): + self.func = func + 
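+        # Note: __get__ (below) stores the computed value directly on the
+        # instance under the wrapped function's name, so later attribute
+        # lookups bypass this descriptor and reuse the cached value.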
#for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + #obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + +def convert_path(pathname): + """Return 'pathname' as a name that will work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". + """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. 
+ """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + with open(path, 'wb') as f: + f.write(data.encode(encoding)) + self.record_as_written(path) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. + for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + py_compile.compile(path, dpath, diagpath, True) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + Commit recorded changes, turn off recording, return + changes. 
+ """ + assert self.record + result = self.files_written, self.dirs_created + self._init_record() + return result + + def rollback(self): + if not self.dry_run: + for f in list(self.files_written): + if os.path.exists(f): + os.remove(f) + # dirs should all be empty now, except perhaps for + # __pycache__ subdirs + # reverse so that subdirs appear before their parents + dirs = sorted(self.dirs_created, reverse=True) + for d in dirs: + flist = os.listdir(d) + if flist: + assert flist == ['__pycache__'] + sd = os.path.join(d, flist[0]) + os.rmdir(sd) + os.rmdir(d) # should fail if non-empty + self._init_record() + +def resolve(module_name, dotted_path): + if module_name in sys.modules: + mod = sys.modules[module_name] + else: + mod = __import__(module_name) + if dotted_path is None: + result = mod + else: + parts = dotted_path.split('.') + result = getattr(mod, parts.pop(0)) + for p in parts: + result = getattr(result, p) + return result + + +class ExportEntry(object): + def __init__(self, name, prefix, suffix, flags): + self.name = name + self.prefix = prefix + self.suffix = suffix + self.flags = flags + + @cached_property + def value(self): + return resolve(self.prefix, self.suffix) + + def __repr__(self): # pragma: no cover + return '' % (self.name, self.prefix, + self.suffix, self.flags) + + def __eq__(self, other): + if not isinstance(other, ExportEntry): + result = False + else: + result = (self.name == other.name and + self.prefix == other.prefix and + self.suffix == other.suffix and + self.flags == other.flags) + return result + + __hash__ = object.__hash__ + + +ENTRY_RE = re.compile(r'''(?P(\w|[-.+])+) + \s*=\s*(?P(\w+)([:\.]\w+)*) + \s*(\[\s*(?P\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? + ''', re.VERBOSE) + +def get_export_entry(specification): + m = ENTRY_RE.search(specification) + if not m: + result = None + if '[' in specification or ']' in specification: + raise DistlibException("Invalid specification " + "'%s'" % specification) + else: + d = m.groupdict() + name = d['name'] + path = d['callable'] + colons = path.count(':') + if colons == 0: + prefix, suffix = path, None + else: + if colons != 1: + raise DistlibException("Invalid specification " + "'%s'" % specification) + prefix, suffix = path.split(':') + flags = d['flags'] + if flags is None: + if '[' in specification or ']' in specification: + raise DistlibException("Invalid specification " + "'%s'" % specification) + flags = [] + else: + flags = [f.strip() for f in flags.split(',')] + result = ExportEntry(name, prefix, suffix, flags) + return result + + +def get_cache_base(suffix=None): + """ + Return the default base location for distlib caches. If the directory does + not exist, it is created. Use the suffix provided for the base directory, + and default to '.distlib' if it isn't provided. + + On Windows, if LOCALAPPDATA is defined in the environment, then it is + assumed to be a directory, and will be the parent directory of the result. + On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home + directory - using os.expanduser('~') - will be the parent directory of + the result. + + The result is just the directory '.distlib' in the parent directory as + determined above, or with the name specified with ``suffix``. 
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. + """ + d, p = os.path.splitdrive(os.path.abspath(path)) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.split('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + filename = unquote(filename).replace(' ', '-') + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' + r'\(\s*(?P[^\s)]+)\)$') + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result +# +# Extended metadata functionality +# + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + #data = reader.read().decode('utf-8') + #result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). + :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. 
+ """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', + event, args, kwargs, result) + return result + +# +# Simple sequencing +# +class Sequencer(object): + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or + step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node],index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', + '.tgz', '.tbz', '.whl') + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G','T','P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
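+            # NB: 'and' binds more tightly than 'or' in the test above, so this
+            # branch is taken when (duration <= 0 and self.max is None) or when
+            # no progress has been made yet.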
+ #elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + #import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + #import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
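+            # an empty prefix before '**' means "walk from the current directory";
+            # an empty radical after '**' (handled below) matches everything under it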
+ if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + +if ssl: + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, + CertificateError) + + +# +# HTTPSConnection which verifies certificates/matches domains +# + + class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + if not hasattr(ssl, 'SSLContext'): + # For 2.x + if self.ca_certs: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, + cert_reqs=cert_reqs, + ssl_version=ssl.PROTOCOL_SSLv23, + ca_certs=self.ca_certs) + else: # pragma: no cover + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + if self.cert_file: + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: # pragma: no cover + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + class HTTPSHandler(BaseHTTPSHandler): + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + + # + # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- + # Middle proxy using HTTP listens on port 443, or an index mistakenly serves + # HTML containing a http://xyz link when it should be https://xyz), + # you can use the following handler class, which does not allow HTTP traffic. + # + # It works by inheriting from HTTPHandler - so build_opener won't add a + # handler for HTTP itself. 
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + +# +# XML-RPC with timeouts +# + +_ver_info = sys.version_info[:2] + +if _ver_info == (2, 6): + class HTTP(httplib.HTTP): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + + if ssl: + class HTTPS(httplib.HTTPS): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + +class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if _ver_info == (2, 6): + result = HTTP(h, timeout=self.timeout) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + result = self._connection[1] + return result + +if ssl: + class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if _ver_info == (2, 6): + result = HTTPS(host, None, **kwargs) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + result = self._connection[1] + return result + + +class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + scheme, _ = splittype(uri) + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. 
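+# The CSVReader/CSVWriter wrappers below open files in binary mode and
+# encode/decode row items as UTF-8 on Python 2, and use text mode with
+# newline='' on Python 3.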
+# + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + +class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + +# +# Configurator functionality +# + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + +# +# Mixin for running subprocesses and capturing their output +# + +class SubprocessMixin(object): + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/version.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/version.py new file mode 100644 index 0000000..48c17c0 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/version.py @@ -0,0 +1,742 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2016 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Implementation of a flexible versioning scheme providing support for PEP-440, +setuptools-compatible and semantic versioning. +""" + +import logging +import re + +from .compat import string_types + +__all__ = ['NormalizedVersion', 'NormalizedMatcher', + 'LegacyVersion', 'LegacyMatcher', + 'SemanticVersion', 'SemanticMatcher', + 'UnsupportedVersionError', 'get_scheme'] + +logger = logging.getLogger(__name__) + + +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" + pass + + +class Version(object): + def __init__(self, s): + self._string = s = s.strip() + self._parts = parts = self.parse(s) + assert isinstance(parts, tuple) + assert len(parts) > 0 + + def parse(self, s): + raise NotImplementedError('please implement in a subclass') + + def _check_compatible(self, other): + if type(self) != type(other): + raise TypeError('cannot compare %r and %r' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + self._check_compatible(other) + return self._parts < other._parts + + def __gt__(self, other): + return not (self.__lt__(other) or self.__eq__(other)) + + def __le__(self, other): + return self.__lt__(other) or self.__eq__(other) + + def __ge__(self, other): + return self.__gt__(other) or self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self._parts) + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + @property + def is_prerelease(self): + raise NotImplementedError('Please implement in subclasses.') + + +class Matcher(object): + version_class = None + + dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?") + comp_re = re.compile(r'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$') + num_re = re.compile(r'^\d+(\.\d+)*$') + + # value is either a callable or the name of a method + _operators = { + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, c, p: v == c or v < c, + '>=': lambda v, c, p: v == c or v > c, + '==': lambda 
v, c, p: v == c, + '===': lambda v, c, p: v == c, + # by default, compatible => >=. + '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, + } + + def __init__(self, s): + if self.version_class is None: + raise ValueError('Please specify a version class') + self._string = s = s.strip() + m = self.dist_re.match(s) + if not m: + raise ValueError('Not valid: %r' % s) + groups = m.groups('') + self.name = groups[0].strip() + self.key = self.name.lower() # for case-insensitive comparisons + clist = [] + if groups[2]: + constraints = [c.strip() for c in groups[2].split(',')] + for c in constraints: + m = self.comp_re.match(c) + if not m: + raise ValueError('Invalid %r in %r' % (c, s)) + groups = m.groups() + op = groups[0] or '~=' + s = groups[1] + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + if not self.num_re.match(vn): + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) + self._parts = tuple(clist) + + def match(self, version): + """ + Check if the provided version matches the constraints. + + :param version: The version to match against this instance. + :type version: String or :class:`Version` instance. + """ + if isinstance(version, string_types): + version = self.version_class(version) + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): + return False + return True + + @property + def exact_version(self): + result = None + if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): + result = self._parts[0][1] + return result + + def _check_compatible(self, other): + if type(self) != type(other) or self.name != other.name: + raise TypeError('cannot compare %s and %s' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self.key == other.key and self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self.key) + hash(self._parts) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' + r'(\.(post)(\d+))?(\.(dev)(\d+))?' 
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') + + +def _pep_440_key(s): + s = s.strip() + m = PEP440_VERSION_RE.match(s) + if not m: + raise UnsupportedVersionError('Not a valid version: %s' % s) + groups = m.groups() + nums = tuple(int(v) for v in groups[1].split('.')) + while len(nums) > 1 and nums[-1] == 0: + nums = nums[:-1] + + if not groups[0]: + epoch = 0 + else: + epoch = int(groups[0]) + pre = groups[4:6] + post = groups[7:9] + dev = groups[10:12] + local = groups[13] + if pre == (None, None): + pre = () + else: + pre = pre[0], int(pre[1]) + if post == (None, None): + post = () + else: + post = post[0], int(post[1]) + if dev == (None, None): + dev = () + else: + dev = dev[0], int(dev[1]) + if local is None: + local = () + else: + parts = [] + for part in local.split('.'): + # to ensure that numeric compares as > lexicographic, avoid + # comparing them directly, but encode a tuple which ensures + # correct sorting + if part.isdigit(): + part = (1, int(part)) + else: + part = (0, part) + parts.append(part) + local = tuple(parts) + if not pre: + # either before pre-release, or final release and after + if not post and dev: + # before pre-release + pre = ('a', -1) # to sort before a0 + else: + pre = ('z',) # to sort after all pre-releases + # now look at the state of post and dev. + if not post: + post = ('_',) # sort before 'a' + if not dev: + dev = ('final',) + + #print('%s -> %s' % (s, m.groups())) + return epoch, nums, pre, post, dev, local + + +_normalized_key = _pep_440_key + + +class NormalizedVersion(Version): + """A rational version. + + Good: + 1.2 # equivalent to "1.2.0" + 1.2.0 + 1.2a1 + 1.2.3a2 + 1.2.3b1 + 1.2.3c1 + 1.2.3.4 + TODO: fill this out + + Bad: + 1 # minimum two numbers + 1.2a # release level must have a release serial + 1.2.3b + """ + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP440_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[1].split('.')) + return result + + PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) + + @property + def is_prerelease(self): + return any(t[0] in self.PREREL_TAGS for t in self._parts if t) + + +def _match_prefix(x, y): + x = str(x) + y = str(y) + if x == y: + return True + if not x.startswith(y): + return False + n = len(y) + return x[n] == '.' + + +class NormalizedMatcher(Matcher): + version_class = NormalizedVersion + + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '===': '_match_arbitrary', + '!=': '_match_ne', + } + + def _adjust_local(self, version, constraint, prefix): + if prefix: + strip_local = '+' not in constraint and version._parts[-1] + else: + # both constraint and version are + # NormalizedVersion instances. + # If constraint does not have a local component, + # ensure the version doesn't, either. 
+ strip_local = not constraint._parts[-1] and version._parts[-1] + if strip_local: + s = version._string.split('+', 1)[0] + version = self.version_class(s) + return version, constraint + + def _match_lt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version >= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_gt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version <= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_le(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version <= constraint + + def _match_ge(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version >= constraint + + def _match_eq(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version == constraint) + else: + result = _match_prefix(version, constraint) + return result + + def _match_arbitrary(self, version, constraint, prefix): + return str(version) == str(constraint) + + def _match_ne(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version != constraint) + else: + result = not _match_prefix(version, constraint) + return result + + def _match_compatible(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version == constraint: + return True + if version < constraint: + return False +# if not prefix: +# return True + release_clause = constraint._release_clause + if len(release_clause) > 1: + release_clause = release_clause[:-1] + pfx = '.'.join([str(i) for i in release_clause]) + return _match_prefix(version, pfx) + +_REPLACEMENTS = ( + (re.compile('[.+-]$'), ''), # remove trailing puncts + (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start + (re.compile('^[.-]'), ''), # remove leading puncts + (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses + (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha + (re.compile(r'\b(pre-alpha|prealpha)\b'), + 'pre.alpha'), # standardise + (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses +) + +_SUFFIX_REPLACEMENTS = ( + (re.compile('^[:~._+-]+'), ''), # remove leading puncts + (re.compile('[,*")([\]]'), ''), # remove unwanted chars + (re.compile('[~:+_ -]'), '.'), # replace illegal chars + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\.$'), ''), # trailing '.' +) + +_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') + + +def _suggest_semantic_version(s): + """ + Try to suggest a semantic form for a version for which + _suggest_normalized_version couldn't come up with anything. + """ + result = s.strip().lower() + for pat, repl in _REPLACEMENTS: + result = pat.sub(repl, result) + if not result: + result = '0.0.0' + + # Now look for numeric prefix, and separate it out from + # the rest. 
+ #import pdb; pdb.set_trace() + m = _NUMERIC_PREFIX.match(result) + if not m: + prefix = '0.0.0' + suffix = result + else: + prefix = m.groups()[0].split('.') + prefix = [int(i) for i in prefix] + while len(prefix) < 3: + prefix.append(0) + if len(prefix) == 3: + suffix = result[m.end():] + else: + suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] + prefix = prefix[:3] + prefix = '.'.join([str(i) for i in prefix]) + suffix = suffix.strip() + if suffix: + #import pdb; pdb.set_trace() + # massage the suffix. + for pat, repl in _SUFFIX_REPLACEMENTS: + suffix = pat.sub(repl, suffix) + + if not suffix: + result = prefix + else: + sep = '-' if 'dev' in suffix else '+' + result = prefix + sep + suffix + if not is_semver(result): + result = None + return result + + +def _suggest_normalized_version(s): + """Suggest a normalized version close to the given version string. + + If you have a version string that isn't rational (i.e. NormalizedVersion + doesn't like it) then you might be able to get an equivalent (or close) + rational version from this function. + + This does a number of simple normalizations to the given string, based + on observation of versions currently in use on PyPI. Given a dump of + those version during PyCon 2009, 4287 of them: + - 2312 (53.93%) match NormalizedVersion without change + with the automatic suggestion + - 3474 (81.04%) match when using this suggestion method + + @param s {str} An irrational version string. + @returns A rational version string, or None, if couldn't determine one. + """ + try: + _normalized_key(s) + return s # already rational + except UnsupportedVersionError: + pass + + rs = s.lower() + + # part of this could use maketrans + for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), + ('beta', 'b'), ('rc', 'c'), ('-final', ''), + ('-pre', 'c'), + ('-release', ''), ('.release', ''), ('-stable', ''), + ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), + ('final', '')): + rs = rs.replace(orig, repl) + + # if something ends with dev or pre, we add a 0 + rs = re.sub(r"pre$", r"pre0", rs) + rs = re.sub(r"dev$", r"dev0", rs) + + # if we have something like "b-2" or "a.2" at the end of the + # version, that is probably beta, alpha, etc + # let's remove the dash or dot + rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) + + # 1.0-dev-r371 -> 1.0.dev371 + # 0.1-dev-r79 -> 0.1.dev79 + rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) + + # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 + rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) + + # Clean: v0.3, v1.0 + if rs.startswith('v'): + rs = rs[1:] + + # Clean leading '0's on numbers. + #TODO: unintended side-effect on, e.g., "2003.05.09" + # PyPI stats: 77 (~2%) better + rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) + + # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers + # zero. 
+ # PyPI stats: 245 (7.56%) better + rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) + + # the 'dev-rNNN' tag is a dev tag + rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) + + # clean the - when used as a pre delimiter + rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) + + # a terminal "dev" or "devel" can be changed into ".dev0" + rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) + + # a terminal "dev" can be changed into ".dev0" + rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) + + # a terminal "final" or "stable" can be removed + rs = re.sub(r"(final|stable)$", "", rs) + + # The 'r' and the '-' tags are post release tags + # 0.4a1.r10 -> 0.4a1.post10 + # 0.9.33-17222 -> 0.9.33.post17222 + # 0.9.33-r17222 -> 0.9.33.post17222 + rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) + + # Clean 'r' instead of 'dev' usage: + # 0.9.33+r17222 -> 0.9.33.dev17222 + # 1.0dev123 -> 1.0.dev123 + # 1.0.git123 -> 1.0.dev123 + # 1.0.bzr123 -> 1.0.dev123 + # 0.1a0dev.123 -> 0.1a0.dev123 + # PyPI stats: ~150 (~4%) better + rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) + + # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: + # 0.2.pre1 -> 0.2c1 + # 0.2-c1 -> 0.2c1 + # 1.0preview123 -> 1.0c123 + # PyPI stats: ~21 (0.62%) better + rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) + + # Tcl/Tk uses "px" for their post release markers + rs = re.sub(r"p(\d+)$", r".post\1", rs) + + try: + _normalized_key(rs) + except UnsupportedVersionError: + rs = None + return rs + +# +# Legacy version processing (distribute-compatible) +# + +_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) +_VERSION_REPLACE = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + '': None, + '.': None, +} + + +def _legacy_key(s): + def get_parts(s): + result = [] + for p in _VERSION_PART.split(s.lower()): + p = _VERSION_REPLACE.get(p, p) + if p: + if '0' <= p[:1] <= '9': + p = p.zfill(8) + else: + p = '*' + p + result.append(p) + result.append('*final') + return result + + result = [] + for p in get_parts(s): + if p.startswith('*'): + if p < '*final': + while result and result[-1] == '*final-': + result.pop() + while result and result[-1] == '00000000': + result.pop() + result.append(p) + return tuple(result) + + +class LegacyVersion(Version): + def parse(self, s): + return _legacy_key(s) + + @property + def is_prerelease(self): + result = False + for x in self._parts: + if (isinstance(x, string_types) and x.startswith('*') and + x < '*final'): + result = True + break + return result + + +class LegacyMatcher(Matcher): + version_class = LegacyVersion + + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile('^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + +# +# Semantic versioning +# + +_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' + r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) + + +def is_semver(s): + return _SEMVER_RE.match(s) + + +def _semantic_key(s): + def make_tuple(s, absent): + if s is None: + result = (absent,) + else: + parts = s[1:].split('.') + # We can't compare ints and strings on Python 3, so fudge it + # by zero-filling numeric values so simulate a numeric comparison + result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) + return result + + m = is_semver(s) + if not m: + raise UnsupportedVersionError(s) + groups = m.groups() + major, minor, patch = [int(i) for i in groups[:3]] + # choose the '|' and '*' so that versions sort correctly + pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') + return (major, minor, patch), pre, build + + +class SemanticVersion(Version): + def parse(self, s): + return _semantic_key(s) + + @property + def is_prerelease(self): + return self._parts[1][0] != '|' + + +class SemanticMatcher(Matcher): + version_class = SemanticVersion + + +class VersionScheme(object): + def __init__(self, key, matcher, suggester=None): + self.key = key + self.matcher = matcher + self.suggester = suggester + + def is_valid_version(self, s): + try: + self.matcher.version_class(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_matcher(self, s): + try: + self.matcher(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_constraint_list(self, s): + """ + Used for processing some metadata fields + """ + return self.is_valid_matcher('dummy_name (%s)' % s) + + def suggest(self, s): + if self.suggester is None: + result = None + else: + result = self.suggester(s) + return result + +_SCHEMES = { + 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, + _suggest_normalized_version), + 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s), + 'semantic': VersionScheme(_semantic_key, SemanticMatcher, + _suggest_semantic_version), +} + +_SCHEMES['default'] = _SCHEMES['normalized'] + + +def get_scheme(name): + if name not in _SCHEMES: + raise ValueError('unknown scheme name: %r' % name) + return _SCHEMES[name] diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/wheel.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/wheel.py new file mode 100644 index 0000000..2952b8e --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distlib/wheel.py @@ -0,0 +1,978 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2016 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import base64 +import codecs +import datetime +import distutils.util +from email import message_from_file +import hashlib +import imp +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import zipfile + +from . 
import __version__, DistlibException +from .compat import sysconfig, ZipFile, fsdecode, text_type, filter +from .database import InstalledDistribution +from .metadata import Metadata, METADATA_FILENAME +from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, + cached_property, get_cache_base, read_exports, tempdir) +from .version import NormalizedVersion, UnsupportedVersionError + +logger = logging.getLogger(__name__) + +cache = None # created when needed + +if hasattr(sys, 'pypy_version_info'): + IMP_PREFIX = 'pp' +elif sys.platform.startswith('java'): + IMP_PREFIX = 'jy' +elif sys.platform == 'cli': + IMP_PREFIX = 'ip' +else: + IMP_PREFIX = 'cp' + +VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') +if not VER_SUFFIX: # pragma: no cover + VER_SUFFIX = '%s%s' % sys.version_info[:2] +PYVER = 'py' + VER_SUFFIX +IMPVER = IMP_PREFIX + VER_SUFFIX + +ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_') + +ABI = sysconfig.get_config_var('SOABI') +if ABI and ABI.startswith('cpython-'): + ABI = ABI.replace('cpython-', 'cp') +else: + def _derive_abi(): + parts = ['cp', VER_SUFFIX] + if sysconfig.get_config_var('Py_DEBUG'): + parts.append('d') + if sysconfig.get_config_var('WITH_PYMALLOC'): + parts.append('m') + if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4: + parts.append('u') + return ''.join(parts) + ABI = _derive_abi() + del _derive_abi + +FILENAME_RE = re.compile(r''' +(?P[^-]+) +-(?P\d+[^-]*) +(-(?P\d+[^-]*))? +-(?P\w+\d+(\.\w+\d+)*) +-(?P\w+) +-(?P\w+(\.\w+)*) +\.whl$ +''', re.IGNORECASE | re.VERBOSE) + +NAME_VERSION_RE = re.compile(r''' +(?P[^-]+) +-(?P\d+[^-]*) +(-(?P\d+[^-]*))?$ +''', re.IGNORECASE | re.VERBOSE) + +SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') +SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') +SHEBANG_PYTHON = b'#!python' +SHEBANG_PYTHONW = b'#!pythonw' + +if os.sep == '/': + to_posix = lambda o: o +else: + to_posix = lambda o: o.replace(os.sep, '/') + + +class Mounter(object): + def __init__(self): + self.impure_wheels = {} + self.libs = {} + + def add(self, pathname, extensions): + self.impure_wheels[pathname] = extensions + self.libs.update(extensions) + + def remove(self, pathname): + extensions = self.impure_wheels.pop(pathname) + for k, v in extensions: + if k in self.libs: + del self.libs[k] + + def find_module(self, fullname, path=None): + if fullname in self.libs: + result = self + else: + result = None + return result + + def load_module(self, fullname): + if fullname in sys.modules: + result = sys.modules[fullname] + else: + if fullname not in self.libs: + raise ImportError('unable to find extension for %s' % fullname) + result = imp.load_dynamic(fullname, self.libs[fullname]) + result.__loader__ = self + parts = fullname.rsplit('.', 1) + if len(parts) > 1: + result.__package__ = parts[0] + return result + +_hook = Mounter() + + +class Wheel(object): + """ + Class to build and install from Wheel files (PEP 427). + """ + + wheel_version = (1, 1) + hash_kind = 'sha256' + + def __init__(self, filename=None, sign=False, verify=False): + """ + Initialise an instance using a (valid) filename. 
+ """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. + """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, + pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + wheel_metadata = self.get_wheel_metadata(zf) + wv = wheel_metadata['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if file_version < (1, 1): + fn = 'METADATA' + else: + fn = METADATA_FILENAME + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + except KeyError: + raise ValueError('Invalid wheel, because %s is ' + 'missing' % fn) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) + if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + 
term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, base): + records = list(records) # make a copy for sorting + p = to_posix(os.path.relpath(record_path, base)) + records.append((p, '', '')) + records.sort() + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + hasher = getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + self.write_record(records, p, libdir) + ap = to_posix(os.path.join(info_dir, 'RECORD')) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. + path = libdir + distinfo = None + for root, dirs, files in os.walk(path): + if root == path: + # At the top level only, save distinfo for later + # and skip it for now + for i, dn in enumerate(dirs): + dn = fsdecode(dn) + if dn.endswith('.dist-info'): + distinfo = os.path.join(root, dn) + del dirs[i] + break + assert distinfo, '.dist-info directory expected, not found' + + for fn in files: + # comment out next suite to leave .pyc files in + if fsdecode(fn).endswith(('.pyc', '.pyo')): + continue + p = os.path.join(root, fn) + rp = to_posix(os.path.relpath(p, path)) + archive_paths.append((rp, p)) + + # Now distinfo. 
Assumed to be flat, i.e. os.listdir is enough. + files = os.listdir(distinfo) + for fn in files: + if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): + p = fsdecode(os.path.join(distinfo, fn)) + ap = to_posix(os.path.join(info_dir, fn)) + archive_paths.append((ap, p)) + + wheel_metadata = [ + 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), + 'Generator: distlib %s' % __version__, + 'Root-Is-Purelib: %s' % is_pure, + ] + for pyver, abi, arch in self.tags: + wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) + p = os.path.join(distinfo, 'WHEEL') + with open(p, 'w') as f: + f.write('\n'.join(wheel_metadata)) + ap = to_posix(os.path.join(info_dir, 'WHEEL')) + archive_paths.append((ap, p)) + + # Now, at last, RECORD. + # Paths in here are archive paths - nothing else makes sense. + self.write_records((distinfo, info_dir), libdir, archive_paths) + # Now, ready to build the zip file + pathname = os.path.join(self.dirname, self.filename) + self.build_zip(pathname, archive_paths) + return pathname + + def install(self, paths, maker, **kwargs): + """ + Install a wheel to the specified paths. If kwarg ``warner`` is + specified, it should be a callable, which will be called with two + tuples indicating the wheel version of this software and the wheel + version in the file, if there is a discrepancy in the versions. + This can be used to issue any warnings to raise any exceptions. + If kwarg ``lib_only`` is True, only the purelib/platlib files are + installed, and the headers, scripts, data and dist-info metadata are + not written. + + The return value is a :class:`InstalledDistribution` instance unless + ``options.lib_only`` is True, in which case the return value is ``None``. + """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! 
+ + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + if u_arcname.endswith('/RECORD.jws'): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. + if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. 
+ commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' %s' % v.flags + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True } + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if cache is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + sys.version[:3]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. 
+ """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' % pathname + raise DistlibException(msg) + if pathname in sys.path: + logger.debug('%s already in path', pathname) + else: + if append: + sys.path.append(pathname) + else: + sys.path.insert(0, pathname) + extensions = self._get_extensions() + if extensions: + if _hook not in sys.meta_path: + sys.meta_path.append(_hook) + _hook.add(pathname, extensions) + + def unmount(self): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if pathname not in sys.path: + logger.debug('%s not in path', pathname) + else: + sys.path.remove(pathname) + if pathname in _hook.impure_wheels: + _hook.remove(pathname) + if not _hook.impure_wheels: + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + def verify(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # TODO version verification + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + if u_arcname.endswith('/RECORD.jws'): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + def update(self, modifier, dest_dir=None, **kwargs): + """ + Update the contents of a wheel in a generic way. The modifier should + be a callable which expects a dictionary argument: its keys are + archive-entry paths, and its values are absolute filesystem paths + where the contents the corresponding archive entries can be found. The + modifier is free to change the contents of the files pointed to, add + new entries and remove entries, before returning. This method will + extract the entire contents of the wheel to a temporary location, call + the modifier, and then use the passed (and possibly updated) + dictionary to write a new wheel. 
If ``dest_dir`` is specified, the new + wheel is written there -- otherwise, the original wheel is overwritten. + + The modifier should return True if it updated the wheel, else False. + This method returns the same value the modifier returns. + """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + v = NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], + '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = not path.endswith(METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, + updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. 
+ """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, - 1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match('(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/distro.py b/RBXLegacyDiscordBot/lib/pip/_vendor/distro.py new file mode 100644 index 0000000..9e7daad --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/distro.py @@ -0,0 +1,1081 @@ +# Copyright 2015,2016 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The ``distro`` package (``distro`` stands for Linux Distribution) provides +information about the Linux distribution it runs on, such as a reliable +machine-readable distro ID, or version information. + +It is a renewed alternative implementation for Python's original +:py:func:`platform.linux_distribution` function, but it provides much more +functionality. An alternative implementation became necessary because Python +3.5 deprecated this function, and Python 3.7 is expected to remove it +altogether. 
Its predecessor function :py:func:`platform.dist` was already +deprecated since Python 2.6 and is also expected to be removed in Python 3.7. +Still, there are many cases in which access to Linux distribution information +is needed. See `Python issue 1322 `_ for +more information. +""" + +import os +import re +import sys +import json +import shlex +import logging +import subprocess + + +if not sys.platform.startswith('linux'): + raise ImportError('Unsupported platform: {0}'.format(sys.platform)) + +_UNIXCONFDIR = '/etc' +_OS_RELEASE_BASENAME = 'os-release' + +#: Translation table for normalizing the "ID" attribute defined in os-release +#: files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as defined in the os-release file, translated to lower case, +#: with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_OS_ID = {} + +#: Translation table for normalizing the "Distributor ID" attribute returned by +#: the lsb_release command, for use by the :func:`distro.id` method. +#: +#: * Key: Value as returned by the lsb_release command, translated to lower +#: case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_LSB_ID = { + 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux + 'redhatenterpriseworkstation': 'rhel', # RHEL 6.7 +} + +#: Translation table for normalizing the distro ID derived from the file name +#: of distro release files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as derived from the file name of a distro release file, +#: translated to lower case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_DISTRO_ID = { + 'redhat': 'rhel', # RHEL 6.x, 7.x +} + +# Pattern for content of distro release file (reversed) +_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( + r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)') + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile( + r'(\w+)[-_](release|version)$') + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + 'debian_version', + 'lsb-release', + 'oem-release', + _OS_RELEASE_BASENAME, + 'system-release' +) + + +def linux_distribution(full_distribution_name=True): + """ + Return information about the current Linux distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. Otherwise, the result of :func:`distro.name`. + + * ``version``: The result of :func:`distro.version`. + + * ``codename``: The result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the Linux distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular Linux distributions. + """ + return _distro.linux_distribution(full_distribution_name) + + +def id(): + """ + Return the distro ID of the current Linux distribution, as a + machine-readable string. 
+ + For a number of Linux distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. + + This package maintains the following reliable distro ID values: + + ============== ========================================= + Distro ID Distribution + ============== ========================================= + "ubuntu" Ubuntu + "debian" Debian + "rhel" RedHat Enterprise Linux + "centos" CentOS + "fedora" Fedora + "sles" SUSE Linux Enterprise Server + "opensuse" openSUSE + "amazon" Amazon Linux + "arch" Arch Linux + "cloudlinux" CloudLinux OS + "exherbo" Exherbo Linux + "gentoo" GenToo Linux + "ibm_powerkvm" IBM PowerKVM + "kvmibm" KVM for IBM z Systems + "linuxmint" Linux Mint + "mageia" Mageia + "mandriva" Mandriva Linux + "parallels" Parallels + "pidora" Pidora + "raspbian" Raspbian + "oracle" Oracle Linux (and Oracle Enterprise Linux) + "scientific" Scientific Linux + "slackware" Slackware + "xenserver" XenServer + ============== ========================================= + + If you have a need to get distros for reliable IDs added into this set, + or if you find that the :func:`distro.id` function returns a different + distro ID for one of the listed distros, please create an issue in the + `distro issue tracker`_. + + **Lookup hierarchy and transformations:** + + First, the ID is obtained from the following sources, in the specified + order. The first available and non-empty value is used: + + * the value of the "ID" attribute of the os-release file, + + * the value of the "Distributor ID" attribute returned by the lsb_release + command, + + * the first part of the file name of the distro release file, + + The so determined ID value then passes the following transformations, + before it is returned by this method: + + * it is translated to lower case, + + * blanks (which should not be there anyway) are translated to underscores, + + * a normalization of the ID is performed, based upon + `normalization tables`_. The purpose of this normalization is to ensure + that the ID is as reliable as possible, even across incompatible changes + in the Linux distributions. A common reason for an incompatible change is + the addition of an os-release file, or the addition of the lsb_release + command, with ID values that differ from what was previously determined + from the distro release file name. + """ + return _distro.id() + + +def name(pretty=False): + """ + Return the name of the current Linux distribution, as a human-readable + string. + + If *pretty* is false, the name is returned without version or codename. + (e.g. "CentOS Linux") + + If *pretty* is true, the version and codename are appended. + (e.g. "CentOS Linux 7.1.1503 (Core)") + + **Lookup hierarchy:** + + The name is obtained from the following sources, in the specified order. + The first available and non-empty value is used: + + * If *pretty* is false: + + - the value of the "NAME" attribute of the os-release file, + + - the value of the "Distributor ID" attribute returned by the lsb_release + command, + + - the value of the "" field of the distro release file. + + * If *pretty* is true: + + - the value of the "PRETTY_NAME" attribute of the os-release file, + + - the value of the "Description" attribute returned by the lsb_release + command, + + - the value of the "" field of the distro release file, appended + with the value of the pretty version ("" and "" + fields) of the distro release file, if available. 
+ """ + return _distro.name(pretty) + + +def version(pretty=False, best=False): + """ + Return the version of the current Linux distribution, as a human-readable + string. + + If *pretty* is false, the version is returned without codename (e.g. + "7.0"). + + If *pretty* is true, the codename in parenthesis is appended, if the + codename is non-empty (e.g. "7.0 (Maipo)"). + + Some distributions provide version numbers with different precisions in + the different sources of distribution information. Examining the different + sources in a fixed priority order does not always yield the most precise + version (e.g. for Debian 8.2, or CentOS 7.1). + + The *best* parameter can be used to control the approach for the returned + version: + + If *best* is false, the first non-empty version number in priority order of + the examined sources is returned. + + If *best* is true, the most precise version number out of all examined + sources is returned. + + **Lookup hierarchy:** + + In all cases, the version number is obtained from the following sources. + If *best* is false, this order represents the priority order: + + * the value of the "VERSION_ID" attribute of the os-release file, + * the value of the "Release" attribute returned by the lsb_release + command, + * the version number parsed from the "" field of the first line + of the distro release file, + * the version number parsed from the "PRETTY_NAME" attribute of the + os-release file, if it follows the format of the distro release files. + * the version number parsed from the "Description" attribute returned by + the lsb_release command, if it follows the format of the distro release + files. + """ + return _distro.version(pretty, best) + + +def version_parts(best=False): + """ + Return the version of the current Linux distribution as a tuple + ``(major, minor, build_number)`` with items as follows: + + * ``major``: The result of :func:`distro.major_version`. + + * ``minor``: The result of :func:`distro.minor_version`. + + * ``build_number``: The result of :func:`distro.build_number`. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.version_parts(best) + + +def major_version(best=False): + """ + Return the major version of the current Linux distribution, as a string, + if provided. + Otherwise, the empty string is returned. The major version is the first + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.major_version(best) + + +def minor_version(best=False): + """ + Return the minor version of the current Linux distribution, as a string, + if provided. + Otherwise, the empty string is returned. The minor version is the second + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.minor_version(best) + + +def build_number(best=False): + """ + Return the build number of the current Linux distribution, as a string, + if provided. + Otherwise, the empty string is returned. The build number is the third part + of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. 
+ """ + return _distro.build_number(best) + + +def like(): + """ + Return a space-separated list of distro IDs of distributions that are + closely related to the current Linux distribution in regards to packaging + and programming interfaces, for example distributions the current + distribution is a derivative from. + + **Lookup hierarchy:** + + This information item is only provided by the os-release file. + For details, see the description of the "ID_LIKE" attribute in the + `os-release man page + `_. + """ + return _distro.like() + + +def codename(): + """ + Return the codename for the release of the current Linux distribution, + as a string. + + If the distribution does not have a codename, an empty string is returned. + + Note that the returned codename is not always really a codename. For + example, openSUSE returns "x86_64". This function does not handle such + cases in any special way and just returns the string it finds, if any. + + **Lookup hierarchy:** + + * the codename within the "VERSION" attribute of the os-release file, if + provided, + + * the value of the "Codename" attribute returned by the lsb_release + command, + + * the value of the "" field of the distro release file. + """ + return _distro.codename() + + +def info(pretty=False, best=False): + """ + Return certain machine-readable information items about the current Linux + distribution in a dictionary, as shown in the following example: + + .. sourcecode:: python + + { + 'id': 'rhel', + 'version': '7.0', + 'version_parts': { + 'major': '7', + 'minor': '0', + 'build_number': '' + }, + 'like': 'fedora', + 'codename': 'Maipo' + } + + The dictionary structure and keys are always the same, regardless of which + information items are available in the underlying data sources. The values + for the various keys are as follows: + + * ``id``: The result of :func:`distro.id`. + + * ``version``: The result of :func:`distro.version`. + + * ``version_parts -> major``: The result of :func:`distro.major_version`. + + * ``version_parts -> minor``: The result of :func:`distro.minor_version`. + + * ``version_parts -> build_number``: The result of + :func:`distro.build_number`. + + * ``like``: The result of :func:`distro.like`. + + * ``codename``: The result of :func:`distro.codename`. + + For a description of the *pretty* and *best* parameters, see the + :func:`distro.version` method. + """ + return _distro.info(pretty, best) + + +def os_release_info(): + """ + Return a dictionary containing key-value pairs for the information items + from the os-release file data source of the current Linux distribution. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_info() + + +def lsb_release_info(): + """ + Return a dictionary containing key-value pairs for the information items + from the lsb_release command data source of the current Linux distribution. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_info() + + +def distro_release_info(): + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current Linux distribution. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_info() + + +def os_release_attr(attribute): + """ + Return a single named information item from the os-release file data source + of the current Linux distribution. 
+ + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_attr(attribute) + + +def lsb_release_attr(attribute): + """ + Return a single named information item from the lsb_release command output + data source of the current Linux distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_attr(attribute) + + +def distro_release_attr(attribute): + """ + Return a single named information item from the distro release file + data source of the current Linux distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_attr(attribute) + + +class LinuxDistribution(object): + """ + Provides information about a Linux distribution. + + This package creates a private module-global instance of this class with + default initialization arguments, that is used by the + `consolidated accessor functions`_ and `single source accessor functions`_. + By using default initialization arguments, that module-global instance + returns data about the current Linux distribution (i.e. the distro this + package runs on). + + Normally, it is not necessary to create additional instances of this class. + However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. + """ + + def __init__(self, + include_lsb=True, + os_release_file='', + distro_release_file=''): + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). 
+ + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + Raises: + + * :py:exc:`IOError`: Some I/O issue with an os-release file or distro + release file. + + * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had + some issue (other than not being available in the program execution + path). + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. + """ + self.os_release_file = os_release_file or \ + os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME) + self.distro_release_file = distro_release_file or '' # updated later + self._os_release_info = self._get_os_release_info() + self._lsb_release_info = self._get_lsb_release_info() \ + if include_lsb else {} + self._distro_release_info = self._get_distro_release_info() + + def __repr__(self): + """Return repr of all info + """ + return \ + "LinuxDistribution(" \ + "os_release_file={0!r}, " \ + "distro_release_file={1!r}, " \ + "_os_release_info={2!r}, " \ + "_lsb_release_info={3!r}, " \ + "_distro_release_info={4!r})".format( + self.os_release_file, + self.distro_release_file, + self._os_release_info, + self._lsb_release_info, + self._distro_release_info) + + def linux_distribution(self, full_distribution_name=True): + """ + Return information about the Linux distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self.codename() + ) + + def id(self): + """Return the distro ID of the Linux distribution, as a string. + + For details, see :func:`distro.id`. + """ + def normalize(distro_id, table): + distro_id = distro_id.lower().replace(' ', '_') + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr('id') + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr('distributor_id') + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr('id') + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return '' + + def name(self, pretty=False): + """ + Return the name of the Linux distribution, as a string. + + For details, see :func:`distro.name`. + """ + name = self.os_release_attr('name') \ + or self.lsb_release_attr('distributor_id') \ + or self.distro_release_attr('name') + if pretty: + name = self.os_release_attr('pretty_name') \ + or self.lsb_release_attr('description') + if not name: + name = self.distro_release_attr('name') + version = self.version(pretty=True) + if version: + name = name + ' ' + version + return name or '' + + def version(self, pretty=False, best=False): + """ + Return the version of the Linux distribution, as a string. + + For details, see :func:`distro.version`. 
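Editor's note: where control over the exact data sources is needed, the `LinuxDistribution` class described above can be instantiated directly. A hedged sketch; the import assumes the vendored module is importable as `distro`, and the file path is purely hypothetical.

```python
from distro import LinuxDistribution  # assumes the vendored module is importable

# Read distribution data from a specific os-release file and skip lsb_release.
dist = LinuxDistribution(include_lsb=False,
                         os_release_file="/tmp/example-os-release")  # hypothetical path

# If the file does not exist, the os-release data source is simply empty.
print(dist.id(), dist.version(best=True), dist.codename())
print(dist.info(pretty=True))
```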
+ """ + versions = [ + self.os_release_attr('version_id'), + self.lsb_release_attr('release'), + self.distro_release_attr('version_id'), + self._parse_distro_release_content( + self.os_release_attr('pretty_name')).get('version_id', ''), + self._parse_distro_release_content( + self.lsb_release_attr('description')).get('version_id', '') + ] + version = '' + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. + for v in versions: + if v.count(".") > version.count(".") or version == '': + version = v + else: + for v in versions: + if v != '': + version = v + break + if pretty and version and self.codename(): + version = u'{0} ({1})'.format(version, self.codename()) + return version + + def version_parts(self, best=False): + """ + Return the version of the Linux distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?') + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or '', build_number or '' + return '', '', '' + + def major_version(self, best=False): + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best=False): + """ + Return the minor version number of the Linux distribution. + + For details, see :func:`distro.minor_version`. + """ + return self.version_parts(best)[1] + + def build_number(self, best=False): + """ + Return the build number of the Linux distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self): + """ + Return the IDs of distributions that are like the Linux distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr('id_like') or '' + + def codename(self): + """ + Return the codename of the Linux distribution. + + For details, see :func:`distro.codename`. + """ + return self.os_release_attr('codename') \ + or self.lsb_release_attr('codename') \ + or self.distro_release_attr('codename') \ + or '' + + def info(self, pretty=False, best=False): + """ + Return certain machine-readable information about the Linux + distribution. + + For details, see :func:`distro.info`. + """ + return dict( + id=self.id(), + version=self.version(pretty, best), + version_parts=dict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best) + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self): + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the Linux distribution. + + For details, see :func:`distro.os_release_info`. + """ + return self._os_release_info + + def lsb_release_info(self): + """ + Return a dictionary containing key-value pairs for the information + items from the lsb_release command data source of the Linux + distribution. + + For details, see :func:`distro.lsb_release_info`. 
+ """ + return self._lsb_release_info + + def distro_release_info(self): + """ + Return a dictionary containing key-value pairs for the information + items from the distro release file data source of the Linux + distribution. + + For details, see :func:`distro.distro_release_info`. + """ + return self._distro_release_info + + def os_release_attr(self, attribute): + """ + Return a single named information item from the os-release file data + source of the Linux distribution. + + For details, see :func:`distro.os_release_attr`. + """ + return self._os_release_info.get(attribute, '') + + def lsb_release_attr(self, attribute): + """ + Return a single named information item from the lsb_release command + output data source of the Linux distribution. + + For details, see :func:`distro.lsb_release_attr`. + """ + return self._lsb_release_info.get(attribute, '') + + def distro_release_attr(self, attribute): + """ + Return a single named information item from the distro release file + data source of the Linux distribution. + + For details, see :func:`distro.distro_release_attr`. + """ + return self._distro_release_info.get(attribute, '') + + def _get_os_release_info(self): + """ + Get the information items from the specified os-release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(self.os_release_file): + with open(self.os_release_file) as release_file: + return self._parse_os_release_content(release_file) + return {} + + @staticmethod + def _parse_os_release_content(lines): + """ + Parse the lines of an os-release file. + + Parameters: + + * lines: Iterable through the lines in the os-release file. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + lexer = shlex.shlex(lines, posix=True) + lexer.whitespace_split = True + + # The shlex module defines its `wordchars` variable using literals, + # making it dependent on the encoding of the Python source file. + # In Python 2.6 and 2.7, the shlex source file is encoded in + # 'iso-8859-1', and the `wordchars` variable is defined as a byte + # string. This causes a UnicodeDecodeError to be raised when the + # parsed content is a unicode object. The following fix resolves that + # (... but it should be fixed in shlex...): + if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): + lexer.wordchars = lexer.wordchars.decode('iso-8859-1') + + tokens = list(lexer) + for token in tokens: + # At this point, all shell-like parsing has been done (i.e. + # comments processed, quotes and backslash escape sequences + # processed, multi-line values assembled, trailing newlines + # stripped, etc.), so the tokens are now either: + # * variable assignments: var=value + # * commands or their arguments (not allowed in os-release) + if '=' in token: + k, v = token.split('=', 1) + if isinstance(v, bytes): + v = v.decode('utf-8') + props[k.lower()] = v + if k == 'VERSION': + # this handles cases in which the codename is in + # the `(CODENAME)` (rhel, centos, fedora) format + # or in the `, CODENAME` format (Ubuntu). + codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v) + if codename: + codename = codename.group() + codename = codename.strip('()') + codename = codename.strip(',') + codename = codename.strip() + # codename appears within paranthese. 
+ props['codename'] = codename + else: + props['codename'] = '' + else: + # Ignore any tokens that are not variable assignments + pass + return props + + def _get_lsb_release_info(self): + """ + Get the information items from the lsb_release command output. + + Returns: + A dictionary containing all information items. + """ + cmd = 'lsb_release -a' + process = subprocess.Popen( + cmd, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + stdout, stderr = stdout.decode('utf-8'), stderr.decode('utf-8') + code = process.returncode + if code == 0: + content = stdout.splitlines() + return self._parse_lsb_release_content(content) + elif code == 127: # Command not found + return {} + else: + if sys.version_info[:2] >= (3, 5): + raise subprocess.CalledProcessError(code, cmd, stdout, stderr) + elif sys.version_info[:2] >= (2, 7): + raise subprocess.CalledProcessError(code, cmd, stdout) + elif sys.version_info[:2] == (2, 6): + raise subprocess.CalledProcessError(code, cmd) + + @staticmethod + def _parse_lsb_release_content(lines): + """ + Parse the output of the lsb_release command. + + Parameters: + + * lines: Iterable through the lines of the lsb_release output. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + for line in lines: + line = line.decode('utf-8') if isinstance(line, bytes) else line + kv = line.strip('\n').split(':', 1) + if len(kv) != 2: + # Ignore lines without colon. + continue + k, v = kv + props.update({k.replace(' ', '_').lower(): v.strip()}) + return props + + def _get_distro_release_info(self): + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file( + self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match: + distro_info['id'] = match.group(1) + return distro_info + else: + basenames = os.listdir(_UNIXCONFDIR) + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + for basename in basenames: + if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: + continue + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match: + filepath = os.path.join(_UNIXCONFDIR, basename) + distro_info = self._parse_distro_release_file(filepath) + if 'name' in distro_info: + # The name is always present if the pattern matches + self.distro_release_file = filepath + distro_info['id'] = match.group(1) + return distro_info + return {} + + def _parse_distro_release_file(self, filepath): + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(filepath): + with open(filepath) as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. 
We don't want them... + return self._parse_distro_release_content(fp.readline()) + return {} + + @staticmethod + def _parse_distro_release_content(line): + """ + Parse a line from a distro release file. + + Parameters: + * line: Line from the distro release file. Must be a unicode string + or a UTF-8 encoded byte string. + + Returns: + A dictionary containing all information items. + """ + if isinstance(line, bytes): + line = line.decode('utf-8') + matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match( + line.strip()[::-1]) + distro_info = {} + if matches: + # regexp ensures non-None + distro_info['name'] = matches.group(3)[::-1] + if matches.group(2): + distro_info['version_id'] = matches.group(2)[::-1] + if matches.group(1): + distro_info['codename'] = matches.group(1)[::-1] + elif line: + distro_info['name'] = line.strip() + return distro_info + + +_distro = LinuxDistribution() + + +def main(): + import argparse + + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.StreamHandler(sys.stdout)) + + parser = argparse.ArgumentParser(description="Linux distro info tool") + parser.add_argument( + '--json', + '-j', + help="Output in machine readable format", + action="store_true") + args = parser.parse_args() + + if args.json: + logger.info(json.dumps(info(), indent=4, sort_keys=True)) + else: + logger.info('Name: %s', name(pretty=True)) + distribution_version = version(pretty=True) + if distribution_version: + logger.info('Version: %s', distribution_version) + distribution_codename = codename() + if distribution_codename: + logger.info('Codename: %s', distribution_codename) + + +if __name__ == '__main__': + main() diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/__init__.py new file mode 100644 index 0000000..7427eb1 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/__init__.py @@ -0,0 +1,25 @@ +""" +HTML parsing library based on the WHATWG "HTML5" +specification. The parser is designed to be compatible with existing +HTML found in the wild and implements well-defined error recovery that +is largely compatible with modern desktop web browsers. 
+ +Example usage: + +import html5lib +f = open("my_document.html") +tree = html5lib.parse(f) +""" + +from __future__ import absolute_import, division, unicode_literals + +from .html5parser import HTMLParser, parse, parseFragment +from .treebuilders import getTreeBuilder +from .treewalkers import getTreeWalker +from .serializer import serialize + +__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", + "getTreeWalker", "serialize"] + +# this has to be at the top level, see how setup.py parses this +__version__ = "1.0b10" diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_ihatexml.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_ihatexml.py new file mode 100644 index 0000000..d6d1d6f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_ihatexml.py @@ -0,0 +1,288 @@ +from __future__ import absolute_import, division, unicode_literals + +import re +import warnings + +from .constants import DataLossWarning + +baseChar = """ +[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | +[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] | +[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] | +[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 | +[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] | +[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] | +[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] | +[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] | +[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 | +[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] | +[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] | +[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D | +[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] | +[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] | +[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] | +[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] | +[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] | +[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] | +[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 | +[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] | +[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] | +[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] | +[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] | +[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] | +[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] | +[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] | +[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] | +[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] | +[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] | +[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A | +#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 | +#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] | +#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] | +[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] | +[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C | +#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 | +[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | 
[#x116D-#x116E] | +[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] | +[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 | +[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] | +[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B | +#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE | +[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] | +[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 | +[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] | +[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]""" + +ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]""" + +combiningCharacter = """ +[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] | +[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 | +[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] | +[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] | +#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] | +[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] | +[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 | +#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] | +[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC | +[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] | +#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] | +[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] | +[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] | +[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] | +[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] | +[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] | +#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 | +[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] | +#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] | +[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] | +[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] | +#x3099 | #x309A""" + +digit = """ +[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] | +[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] | +[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] | +[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]""" + +extender = """ +#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 | +#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]""" + +letter = " | ".join([baseChar, ideographic]) + +# Without the +name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter, + extender]) +nameFirst = " | ".join([letter, "_"]) + +reChar = re.compile(r"#x([\d|A-F]{4,4})") +reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]") + + +def charStringToList(chars): + charRanges = [item.strip() for item in chars.split(" | ")] + rv = [] + for item in charRanges: + foundMatch = False + for regexp in (reChar, reCharRange): + match = regexp.match(item) + if match is not None: + rv.append([hexToInt(item) for item in match.groups()]) + if len(rv[-1]) == 1: + rv[-1] = rv[-1] * 2 + foundMatch = True + break + if not foundMatch: + assert len(item) == 1 + + rv.append([ord(item)] * 2) + rv = normaliseCharList(rv) + return rv + + +def normaliseCharList(charList): + charList = sorted(charList) + for item in charList: + assert item[1] >= 
item[0] + rv = [] + i = 0 + while i < len(charList): + j = 1 + rv.append(charList[i]) + while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1: + rv[-1][1] = charList[i + j][1] + j += 1 + i += j + return rv + +# We don't really support characters above the BMP :( +max_unicode = int("FFFF", 16) + + +def missingRanges(charList): + rv = [] + if charList[0] != 0: + rv.append([0, charList[0][0] - 1]) + for i, item in enumerate(charList[:-1]): + rv.append([item[1] + 1, charList[i + 1][0] - 1]) + if charList[-1][1] != max_unicode: + rv.append([charList[-1][1] + 1, max_unicode]) + return rv + + +def listToRegexpStr(charList): + rv = [] + for item in charList: + if item[0] == item[1]: + rv.append(escapeRegexp(chr(item[0]))) + else: + rv.append(escapeRegexp(chr(item[0])) + "-" + + escapeRegexp(chr(item[1]))) + return "[%s]" % "".join(rv) + + +def hexToInt(hex_str): + return int(hex_str, 16) + + +def escapeRegexp(string): + specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}", + "[", "]", "|", "(", ")", "-") + for char in specialCharacters: + string = string.replace(char, "\\" + char) + + return string + +# output from the above +nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u
11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa + +nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa + +# Simpler things +nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\-\'()+,./:=?;!*#@$_%]") + + +class InfosetFilter(object): + replacementRegexp = re.compile(r"U[\dA-F]{5,5}") + + def __init__(self, + dropXmlnsLocalName=False, + dropXmlnsAttrNs=False, + preventDoubleDashComments=False, + preventDashAtCommentEnd=False, + replaceFormFeedCharacters=True, + preventSingleQuotePubid=False): + + self.dropXmlnsLocalName = dropXmlnsLocalName + self.dropXmlnsAttrNs = dropXmlnsAttrNs + + self.preventDoubleDashComments = preventDoubleDashComments + self.preventDashAtCommentEnd = preventDashAtCommentEnd + + self.replaceFormFeedCharacters = replaceFormFeedCharacters + + self.preventSingleQuotePubid = preventSingleQuotePubid + + self.replaceCache = {} + + def coerceAttribute(self, name, namespace=None): + if self.dropXmlnsLocalName and name.startswith("xmlns:"): + warnings.warn("Attributes cannot begin with xmlns", DataLossWarning) + return None + elif (self.dropXmlnsAttrNs and + namespace 
== "http://www.w3.org/2000/xmlns/"): + warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning) + return None + else: + return self.toXmlName(name) + + def coerceElement(self, name): + return self.toXmlName(name) + + def coerceComment(self, data): + if self.preventDoubleDashComments: + while "--" in data: + warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning) + data = data.replace("--", "- -") + if data.endswith("-"): + warnings.warn("Comments cannot end in a dash", DataLossWarning) + data += " " + return data + + def coerceCharacters(self, data): + if self.replaceFormFeedCharacters: + for _ in range(data.count("\x0C")): + warnings.warn("Text cannot contain U+000C", DataLossWarning) + data = data.replace("\x0C", " ") + # Other non-xml characters + return data + + def coercePubid(self, data): + dataOutput = data + for char in nonPubidCharRegexp.findall(data): + warnings.warn("Coercing non-XML pubid", DataLossWarning) + replacement = self.getReplacementCharacter(char) + dataOutput = dataOutput.replace(char, replacement) + if self.preventSingleQuotePubid and dataOutput.find("'") >= 0: + warnings.warn("Pubid cannot contain single quote", DataLossWarning) + dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'")) + return dataOutput + + def toXmlName(self, name): + nameFirst = name[0] + nameRest = name[1:] + m = nonXmlNameFirstBMPRegexp.match(nameFirst) + if m: + warnings.warn("Coercing non-XML name", DataLossWarning) + nameFirstOutput = self.getReplacementCharacter(nameFirst) + else: + nameFirstOutput = nameFirst + + nameRestOutput = nameRest + replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) + for char in replaceChars: + warnings.warn("Coercing non-XML name", DataLossWarning) + replacement = self.getReplacementCharacter(char) + nameRestOutput = nameRestOutput.replace(char, replacement) + return nameFirstOutput + nameRestOutput + + def getReplacementCharacter(self, char): + if char in self.replaceCache: + replacement = self.replaceCache[char] + else: + replacement = self.escapeChar(char) + return replacement + + def fromXmlName(self, name): + for item in set(self.replacementRegexp.findall(name)): + name = name.replace(item, self.unescapeChar(item)) + return name + + def escapeChar(self, char): + replacement = "U%05X" % ord(char) + self.replaceCache[char] = replacement + return replacement + + def unescapeChar(self, charcode): + return chr(int(charcode[1:], 16)) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_inputstream.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_inputstream.py new file mode 100644 index 0000000..7c5639f --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_inputstream.py @@ -0,0 +1,923 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import text_type, binary_type +from pip._vendor.six.moves import http_client, urllib + +import codecs +import re + +from pip._vendor import webencodings + +from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase +from .constants import ReparseException +from . 
import _utils + +from io import StringIO + +try: + from io import BytesIO +except ImportError: + BytesIO = StringIO + +# Non-unicode versions of constants for use in the pre-parser +spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) +asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) +asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase]) +spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"]) + + +invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa + +if _utils.supports_lone_surrogates: + # Use one extra step of indirection and create surrogates with + # eval. Not using this indirection would introduce an illegal + # unicode literal on platforms not supporting such lone + # surrogates. + assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1 + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] + + eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used + "]") +else: + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) + +non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, + 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, + 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, + 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, + 0x10FFFE, 0x10FFFF]) + +ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005B-\u0060\u007B-\u007E]") + +# Cache for charsUntil() +charsUntilRegEx = {} + + +class BufferedStream(object): + """Buffering for streams that do not have buffering of their own + + The buffer is implemented as a list of chunks on the assumption that + joining many strings will be slow since it is O(n**2) + """ + + def __init__(self, stream): + self.stream = stream + self.buffer = [] + self.position = [-1, 0] # chunk number, offset + + def tell(self): + pos = 0 + for chunk in self.buffer[:self.position[0]]: + pos += len(chunk) + pos += self.position[1] + return pos + + def seek(self, pos): + assert pos <= self._bufferedBytes() + offset = pos + i = 0 + while len(self.buffer[i]) < offset: + offset -= len(self.buffer[i]) + i += 1 + self.position = [i, offset] + + def read(self, bytes): + if not self.buffer: + return self._readStream(bytes) + elif (self.position[0] == len(self.buffer) and + self.position[1] == len(self.buffer[-1])): + return self._readStream(bytes) + else: + return self._readFromBuffer(bytes) + + def _bufferedBytes(self): + return sum([len(item) for item in self.buffer]) + + def _readStream(self, bytes): + data = self.stream.read(bytes) + self.buffer.append(data) + self.position[0] += 1 + self.position[1] = len(data) + return data + + def _readFromBuffer(self, bytes): + remainingBytes = bytes + rv = [] + bufferIndex = self.position[0] + bufferOffset = self.position[1] + while bufferIndex < len(self.buffer) and remainingBytes != 0: + assert remainingBytes > 0 + bufferedData = self.buffer[bufferIndex] + + if remainingBytes <= len(bufferedData) - bufferOffset: + bytesToRead = remainingBytes + self.position = 
[bufferIndex, bufferOffset + bytesToRead] + else: + bytesToRead = len(bufferedData) - bufferOffset + self.position = [bufferIndex, len(bufferedData)] + bufferIndex += 1 + rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead]) + remainingBytes -= bytesToRead + + bufferOffset = 0 + + if remainingBytes: + rv.append(self._readStream(remainingBytes)) + + return b"".join(rv) + + +def HTMLInputStream(source, **kwargs): + # Work around Python bug #20007: read(0) closes the connection. + # http://bugs.python.org/issue20007 + if (isinstance(source, http_client.HTTPResponse) or + # Also check for addinfourl wrapping HTTPResponse + (isinstance(source, urllib.response.addbase) and + isinstance(source.fp, http_client.HTTPResponse))): + isUnicode = False + elif hasattr(source, "read"): + isUnicode = isinstance(source.read(0), text_type) + else: + isUnicode = isinstance(source, text_type) + + if isUnicode: + encodings = [x for x in kwargs if x.endswith("_encoding")] + if encodings: + raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings) + + return HTMLUnicodeInputStream(source, **kwargs) + else: + return HTMLBinaryInputStream(source, **kwargs) + + +class HTMLUnicodeInputStream(object): + """Provides a unicode stream of characters to the HTMLTokenizer. + + This class takes care of character encoding and removing or replacing + incorrect byte-sequences and also provides column and line tracking. + + """ + + _defaultChunkSize = 10240 + + def __init__(self, source): + """Initialises the HTMLInputStream. + + HTMLInputStream(source, [encoding]) -> Normalized stream from source + for use by html5lib. + + source can be either a file-object, local filename or a string. + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + """ + + if not _utils.supports_lone_surrogates: + # Such platforms will have already checked for such + # surrogate errors, so no need to do this checking. + self.reportCharacterErrors = None + elif len("\U0010FFFF") == 1: + self.reportCharacterErrors = self.characterErrorsUCS4 + else: + self.reportCharacterErrors = self.characterErrorsUCS2 + + # List of where new lines occur + self.newLines = [0] + + self.charEncoding = (lookupEncoding("utf-8"), "certain") + self.dataStream = self.openStream(source) + + self.reset() + + def reset(self): + self.chunk = "" + self.chunkSize = 0 + self.chunkOffset = 0 + self.errors = [] + + # number of (complete) lines in previous chunks + self.prevNumLines = 0 + # number of columns in the last line of the previous chunk + self.prevNumCols = 0 + + # Deal with CR LF and surrogates split over chunk boundaries + self._bufferedCharacter = None + + def openStream(self, source): + """Produces a file object from source. + + source can be either a file object, local filename or a string. 
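Editor's note: a hedged sketch of the dispatch performed by the `HTMLInputStream` factory above: already-decoded text yields an `HTMLUnicodeInputStream`, byte input an `HTMLBinaryInputStream`, and encoding keywords are rejected for text input. The import path assumes the vendored layout used in this tree.

```python
from pip._vendor.html5lib import _inputstream

unicode_stream = _inputstream.HTMLInputStream("<p>hello</p>")
assert isinstance(unicode_stream, _inputstream.HTMLUnicodeInputStream)

binary_stream = _inputstream.HTMLInputStream(b"<p>hello</p>")
assert isinstance(binary_stream, _inputstream.HTMLBinaryInputStream)

try:
    _inputstream.HTMLInputStream("<p>hello</p>", override_encoding="utf-8")
except TypeError:
    pass  # encodings cannot be combined with already-decoded input
```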
+ + """ + # Already a file object + if hasattr(source, 'read'): + stream = source + else: + stream = StringIO(source) + + return stream + + def _position(self, offset): + chunk = self.chunk + nLines = chunk.count('\n', 0, offset) + positionLine = self.prevNumLines + nLines + lastLinePos = chunk.rfind('\n', 0, offset) + if lastLinePos == -1: + positionColumn = self.prevNumCols + offset + else: + positionColumn = offset - (lastLinePos + 1) + return (positionLine, positionColumn) + + def position(self): + """Returns (line, col) of the current position in the stream.""" + line, col = self._position(self.chunkOffset) + return (line + 1, col) + + def char(self): + """ Read one character from the stream or queue if available. Return + EOF when EOF is reached. + """ + # Read a new chunk from the input stream if necessary + if self.chunkOffset >= self.chunkSize: + if not self.readChunk(): + return EOF + + chunkOffset = self.chunkOffset + char = self.chunk[chunkOffset] + self.chunkOffset = chunkOffset + 1 + + return char + + def readChunk(self, chunkSize=None): + if chunkSize is None: + chunkSize = self._defaultChunkSize + + self.prevNumLines, self.prevNumCols = self._position(self.chunkSize) + + self.chunk = "" + self.chunkSize = 0 + self.chunkOffset = 0 + + data = self.dataStream.read(chunkSize) + + # Deal with CR LF and surrogates broken across chunks + if self._bufferedCharacter: + data = self._bufferedCharacter + data + self._bufferedCharacter = None + elif not data: + # We have no more data, bye-bye stream + return False + + if len(data) > 1: + lastv = ord(data[-1]) + if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF: + self._bufferedCharacter = data[-1] + data = data[:-1] + + if self.reportCharacterErrors: + self.reportCharacterErrors(data) + + # Replace invalid characters + data = data.replace("\r\n", "\n") + data = data.replace("\r", "\n") + + self.chunk = data + self.chunkSize = len(data) + + return True + + def characterErrorsUCS4(self, data): + for _ in range(len(invalid_unicode_re.findall(data))): + self.errors.append("invalid-codepoint") + + def characterErrorsUCS2(self, data): + # Someone picked the wrong compile option + # You lose + skip = False + for match in invalid_unicode_re.finditer(data): + if skip: + continue + codepoint = ord(match.group()) + pos = match.start() + # Pretty sure there should be endianness issues here + if _utils.isSurrogatePair(data[pos:pos + 2]): + # We have a surrogate pair! + char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2]) + if char_val in non_bmp_invalid_codepoints: + self.errors.append("invalid-codepoint") + skip = True + elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and + pos == len(data) - 1): + self.errors.append("invalid-codepoint") + else: + skip = False + self.errors.append("invalid-codepoint") + + def charsUntil(self, characters, opposite=False): + """ Returns a string of characters from the stream up to but not + including any character in 'characters' or EOF. 'characters' must be + a container that supports the 'in' method and iteration over its + characters. 
+ """ + + # Use a cache of regexps to find the required characters + try: + chars = charsUntilRegEx[(characters, opposite)] + except KeyError: + if __debug__: + for c in characters: + assert(ord(c) < 128) + regex = "".join(["\\x%02x" % ord(c) for c in characters]) + if not opposite: + regex = "^%s" % regex + chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex) + + rv = [] + + while True: + # Find the longest matching prefix + m = chars.match(self.chunk, self.chunkOffset) + if m is None: + # If nothing matched, and it wasn't because we ran out of chunk, + # then stop + if self.chunkOffset != self.chunkSize: + break + else: + end = m.end() + # If not the whole chunk matched, return everything + # up to the part that didn't match + if end != self.chunkSize: + rv.append(self.chunk[self.chunkOffset:end]) + self.chunkOffset = end + break + # If the whole remainder of the chunk matched, + # use it all and read the next chunk + rv.append(self.chunk[self.chunkOffset:]) + if not self.readChunk(): + # Reached EOF + break + + r = "".join(rv) + return r + + def unget(self, char): + # Only one character is allowed to be ungotten at once - it must + # be consumed again before any further call to unget + if char is not None: + if self.chunkOffset == 0: + # unget is called quite rarely, so it's a good idea to do + # more work here if it saves a bit of work in the frequently + # called char and charsUntil. + # So, just prepend the ungotten character onto the current + # chunk: + self.chunk = char + self.chunk + self.chunkSize += 1 + else: + self.chunkOffset -= 1 + assert self.chunk[self.chunkOffset] == char + + +class HTMLBinaryInputStream(HTMLUnicodeInputStream): + """Provides a unicode stream of characters to the HTMLTokenizer. + + This class takes care of character encoding and removing or replacing + incorrect byte-sequences and also provides column and line tracking. + + """ + + def __init__(self, source, override_encoding=None, transport_encoding=None, + same_origin_parent_encoding=None, likely_encoding=None, + default_encoding="windows-1252", useChardet=True): + """Initialises the HTMLInputStream. + + HTMLInputStream(source, [encoding]) -> Normalized stream from source + for use by html5lib. + + source can be either a file-object, local filename or a string. + + The optional encoding parameter must be a string that indicates + the encoding. 
If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + """ + # Raw Stream - for unicode objects this will encode to utf-8 and set + # self.charEncoding as appropriate + self.rawStream = self.openStream(source) + + HTMLUnicodeInputStream.__init__(self, self.rawStream) + + # Encoding Information + # Number of bytes to use when looking for a meta element with + # encoding information + self.numBytesMeta = 1024 + # Number of bytes to use when using detecting encoding using chardet + self.numBytesChardet = 100 + # Things from args + self.override_encoding = override_encoding + self.transport_encoding = transport_encoding + self.same_origin_parent_encoding = same_origin_parent_encoding + self.likely_encoding = likely_encoding + self.default_encoding = default_encoding + + # Determine encoding + self.charEncoding = self.determineEncoding(useChardet) + assert self.charEncoding[0] is not None + + # Call superclass + self.reset() + + def reset(self): + self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace') + HTMLUnicodeInputStream.reset(self) + + def openStream(self, source): + """Produces a file object from source. + + source can be either a file object, local filename or a string. + + """ + # Already a file object + if hasattr(source, 'read'): + stream = source + else: + stream = BytesIO(source) + + try: + stream.seek(stream.tell()) + except: # pylint:disable=bare-except + stream = BufferedStream(stream) + + return stream + + def determineEncoding(self, chardet=True): + # BOMs take precedence over everything + # This will also read past the BOM if present + charEncoding = self.detectBOM(), "certain" + if charEncoding[0] is not None: + return charEncoding + + # If we've been overriden, we've been overriden + charEncoding = lookupEncoding(self.override_encoding), "certain" + if charEncoding[0] is not None: + return charEncoding + + # Now check the transport layer + charEncoding = lookupEncoding(self.transport_encoding), "certain" + if charEncoding[0] is not None: + return charEncoding + + # Look for meta elements with encoding information + charEncoding = self.detectEncodingMeta(), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Parent document encoding + charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative" + if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"): + return charEncoding + + # "likely" encoding + charEncoding = lookupEncoding(self.likely_encoding), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Guess with chardet, if available + if chardet: + try: + from chardet.universaldetector import UniversalDetector + except ImportError: + pass + else: + buffers = [] + detector = UniversalDetector() + while not detector.done: + buffer = self.rawStream.read(self.numBytesChardet) + assert isinstance(buffer, bytes) + if not buffer: + break + buffers.append(buffer) + detector.feed(buffer) + detector.close() + encoding = lookupEncoding(detector.result['encoding']) + self.rawStream.seek(0) + if encoding is not None: + return encoding, "tentative" + + # Try the default encoding + charEncoding = lookupEncoding(self.default_encoding), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Fallback to html5lib's default if even that hasn't worked + return lookupEncoding("windows-1252"), "tentative" + + def changeEncoding(self, newEncoding): + assert self.charEncoding[1] != "certain" + 
newEncoding = lookupEncoding(newEncoding) + if newEncoding is None: + return + if newEncoding.name in ("utf-16be", "utf-16le"): + newEncoding = lookupEncoding("utf-8") + assert newEncoding is not None + elif newEncoding == self.charEncoding[0]: + self.charEncoding = (self.charEncoding[0], "certain") + else: + self.rawStream.seek(0) + self.charEncoding = (newEncoding, "certain") + self.reset() + raise ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding)) + + def detectBOM(self): + """Attempts to detect at BOM at the start of the stream. If + an encoding can be determined from the BOM return the name of the + encoding otherwise return None""" + bomDict = { + codecs.BOM_UTF8: 'utf-8', + codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be', + codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be' + } + + # Go to beginning of file and read in 4 bytes + string = self.rawStream.read(4) + assert isinstance(string, bytes) + + # Try detecting the BOM using bytes from the string + encoding = bomDict.get(string[:3]) # UTF-8 + seek = 3 + if not encoding: + # Need to detect UTF-32 before UTF-16 + encoding = bomDict.get(string) # UTF-32 + seek = 4 + if not encoding: + encoding = bomDict.get(string[:2]) # UTF-16 + seek = 2 + + # Set the read position past the BOM if one was found, otherwise + # set it to the start of the stream + if encoding: + self.rawStream.seek(seek) + return lookupEncoding(encoding) + else: + self.rawStream.seek(0) + return None + + def detectEncodingMeta(self): + """Report the encoding declared by the meta element + """ + buffer = self.rawStream.read(self.numBytesMeta) + assert isinstance(buffer, bytes) + parser = EncodingParser(buffer) + self.rawStream.seek(0) + encoding = parser.getEncoding() + + if encoding is not None and encoding.name in ("utf-16be", "utf-16le"): + encoding = lookupEncoding("utf-8") + + return encoding + + +class EncodingBytes(bytes): + """String-like object with an associated position and various extra methods + If the position is ever greater than the string length then an exception is + raised""" + def __new__(self, value): + assert isinstance(value, bytes) + return bytes.__new__(self, value.lower()) + + def __init__(self, value): + # pylint:disable=unused-argument + self._position = -1 + + def __iter__(self): + return self + + def __next__(self): + p = self._position = self._position + 1 + if p >= len(self): + raise StopIteration + elif p < 0: + raise TypeError + return self[p:p + 1] + + def next(self): + # Py2 compat + return self.__next__() + + def previous(self): + p = self._position + if p >= len(self): + raise StopIteration + elif p < 0: + raise TypeError + self._position = p = p - 1 + return self[p:p + 1] + + def setPosition(self, position): + if self._position >= len(self): + raise StopIteration + self._position = position + + def getPosition(self): + if self._position >= len(self): + raise StopIteration + if self._position >= 0: + return self._position + else: + return None + + position = property(getPosition, setPosition) + + def getCurrentByte(self): + return self[self.position:self.position + 1] + + currentByte = property(getCurrentByte) + + def skip(self, chars=spaceCharactersBytes): + """Skip past a list of characters""" + p = self.position # use property for the error-checking + while p < len(self): + c = self[p:p + 1] + if c not in chars: + self._position = p + return c + p += 1 + self._position = p + return None + + def skipUntil(self, chars): + p = self.position + while p < len(self): 
+ c = self[p:p + 1] + if c in chars: + self._position = p + return c + p += 1 + self._position = p + return None + + def matchBytes(self, bytes): + """Look for a sequence of bytes at the start of a string. If the bytes + are found return True and advance the position to the byte after the + match. Otherwise return False and leave the position alone""" + p = self.position + data = self[p:p + len(bytes)] + rv = data.startswith(bytes) + if rv: + self.position += len(bytes) + return rv + + def jumpTo(self, bytes): + """Look for the next sequence of bytes matching a given sequence. If + a match is found advance the position to the last byte of the match""" + newPosition = self[self.position:].find(bytes) + if newPosition > -1: + # XXX: This is ugly, but I can't see a nicer way to fix this. + if self._position == -1: + self._position = 0 + self._position += (newPosition + len(bytes) - 1) + return True + else: + raise StopIteration + + +class EncodingParser(object): + """Mini parser for detecting character encoding from meta elements""" + + def __init__(self, data): + """string - the data to work on for encoding detection""" + self.data = EncodingBytes(data) + self.encoding = None + + def getEncoding(self): + methodDispatch = ( + (b"") + + def handleMeta(self): + if self.data.currentByte not in spaceCharactersBytes: + # if we have ") + + def getAttribute(self): + """Return a name,value pair for the next attribute in the stream, + if one is found, or None""" + data = self.data + # Step 1 (skip chars) + c = data.skip(spaceCharactersBytes | frozenset([b"/"])) + assert c is None or len(c) == 1 + # Step 2 + if c in (b">", None): + return None + # Step 3 + attrName = [] + attrValue = [] + # Step 4 attribute name + while True: + if c == b"=" and attrName: + break + elif c in spaceCharactersBytes: + # Step 6! 
+ c = data.skip() + break + elif c in (b"/", b">"): + return b"".join(attrName), b"" + elif c in asciiUppercaseBytes: + attrName.append(c.lower()) + elif c is None: + return None + else: + attrName.append(c) + # Step 5 + c = next(data) + # Step 7 + if c != b"=": + data.previous() + return b"".join(attrName), b"" + # Step 8 + next(data) + # Step 9 + c = data.skip() + # Step 10 + if c in (b"'", b'"'): + # 10.1 + quoteChar = c + while True: + # 10.2 + c = next(data) + # 10.3 + if c == quoteChar: + next(data) + return b"".join(attrName), b"".join(attrValue) + # 10.4 + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + # 10.5 + else: + attrValue.append(c) + elif c == b">": + return b"".join(attrName), b"" + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + elif c is None: + return None + else: + attrValue.append(c) + # Step 11 + while True: + c = next(data) + if c in spacesAngleBrackets: + return b"".join(attrName), b"".join(attrValue) + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + elif c is None: + return None + else: + attrValue.append(c) + + +class ContentAttrParser(object): + def __init__(self, data): + assert isinstance(data, bytes) + self.data = data + + def parse(self): + try: + # Check if the attr name is charset + # otherwise return + self.data.jumpTo(b"charset") + self.data.position += 1 + self.data.skip() + if not self.data.currentByte == b"=": + # If there is no = sign keep looking for attrs + return None + self.data.position += 1 + self.data.skip() + # Look for an encoding between matching quote marks + if self.data.currentByte in (b'"', b"'"): + quoteMark = self.data.currentByte + self.data.position += 1 + oldPosition = self.data.position + if self.data.jumpTo(quoteMark): + return self.data[oldPosition:self.data.position] + else: + return None + else: + # Unquoted value + oldPosition = self.data.position + try: + self.data.skipUntil(spaceCharactersBytes) + return self.data[oldPosition:self.data.position] + except StopIteration: + # Return the whole remaining value + return self.data[oldPosition:] + except StopIteration: + return None + + +def lookupEncoding(encoding): + """Return the python codec name corresponding to an encoding or None if the + string doesn't correspond to a valid encoding.""" + if isinstance(encoding, binary_type): + try: + encoding = encoding.decode("ascii") + except UnicodeDecodeError: + return None + + if encoding is not None: + try: + return webencodings.lookup(encoding) + except AttributeError: + return None + else: + return None diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_tokenizer.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_tokenizer.py new file mode 100644 index 0000000..178f6e7 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_tokenizer.py @@ -0,0 +1,1721 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import unichr as chr + +from collections import deque + +from .constants import spaceCharacters +from .constants import entities +from .constants import asciiLetters, asciiUpper2Lower +from .constants import digits, hexDigits, EOF +from .constants import tokenTypes, tagTokenTypes +from .constants import replacementCharacters + +from ._inputstream import HTMLInputStream + +from ._trie import Trie + +entitiesTrie = Trie(entities) + + +class HTMLTokenizer(object): + """ This class takes care of tokenizing HTML. + + * self.currentToken + Holds the token that is currently being processed. 
+ + * self.state + Holds a reference to the method to be invoked... XXX + + * self.stream + Points to HTMLInputStream object. + """ + + def __init__(self, stream, parser=None, **kwargs): + + self.stream = HTMLInputStream(stream, **kwargs) + self.parser = parser + + # Setup the initial tokenizer state + self.escapeFlag = False + self.lastFourChars = [] + self.state = self.dataState + self.escape = False + + # The current token being created + self.currentToken = None + super(HTMLTokenizer, self).__init__() + + def __iter__(self): + """ This is where the magic happens. + + We do our usually processing through the states and when we have a token + to return we yield the token which pauses processing until the next token + is requested. + """ + self.tokenQueue = deque([]) + # Start processing. When EOF is reached self.state will return False + # instead of True and the loop will terminate. + while self.state(): + while self.stream.errors: + yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)} + while self.tokenQueue: + yield self.tokenQueue.popleft() + + def consumeNumberEntity(self, isHex): + """This function returns either U+FFFD or the character based on the + decimal or hexadecimal representation. It also discards ";" if present. + If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked. + """ + + allowed = digits + radix = 10 + if isHex: + allowed = hexDigits + radix = 16 + + charStack = [] + + # Consume all the characters that are in range while making sure we + # don't hit an EOF. + c = self.stream.char() + while c in allowed and c is not EOF: + charStack.append(c) + c = self.stream.char() + + # Convert the set of characters consumed to an int. + charAsInt = int("".join(charStack), radix) + + # Certain characters get replaced with others + if charAsInt in replacementCharacters: + char = replacementCharacters[charAsInt] + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + elif ((0xD800 <= charAsInt <= 0xDFFF) or + (charAsInt > 0x10FFFF)): + char = "\uFFFD" + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + else: + # Should speed up this check somehow (e.g. move the set to a constant) + if ((0x0001 <= charAsInt <= 0x0008) or + (0x000E <= charAsInt <= 0x001F) or + (0x007F <= charAsInt <= 0x009F) or + (0xFDD0 <= charAsInt <= 0xFDEF) or + charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE, + 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, + 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, + 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, + 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE, + 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, + 0xFFFFF, 0x10FFFE, 0x10FFFF])): + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + try: + # Try/except needed as UCS-2 Python builds' unichar only works + # within the BMP. + char = chr(charAsInt) + except ValueError: + v = charAsInt - 0x10000 + char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF)) + + # Discard the ; if present. Otherwise, put it back on the queue and + # invoke parseError on parser. 
+ if c != ";": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "numeric-entity-without-semicolon"}) + self.stream.unget(c) + + return char + + def consumeEntity(self, allowedChar=None, fromAttribute=False): + # Initialise to the default output for when no entity is matched + output = "&" + + charStack = [self.stream.char()] + if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or + (allowedChar is not None and allowedChar == charStack[0])): + self.stream.unget(charStack[0]) + + elif charStack[0] == "#": + # Read the next character to see if it's hex or decimal + hex = False + charStack.append(self.stream.char()) + if charStack[-1] in ("x", "X"): + hex = True + charStack.append(self.stream.char()) + + # charStack[-1] should be the first digit + if (hex and charStack[-1] in hexDigits) \ + or (not hex and charStack[-1] in digits): + # At least one digit found, so consume the whole number + self.stream.unget(charStack[-1]) + output = self.consumeNumberEntity(hex) + else: + # No digits found + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "expected-numeric-entity"}) + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + + else: + # At this point in the process might have named entity. Entities + # are stored in the global variable "entities". + # + # Consume characters and compare to these to a substring of the + # entity names in the list until the substring no longer matches. + while (charStack[-1] is not EOF): + if not entitiesTrie.has_keys_with_prefix("".join(charStack)): + break + charStack.append(self.stream.char()) + + # At this point we have a string that starts with some characters + # that may match an entity + # Try to find the longest entity the string will match to take care + # of ¬i for instance. + try: + entityName = entitiesTrie.longest_prefix("".join(charStack[:-1])) + entityLength = len(entityName) + except KeyError: + entityName = None + + if entityName is not None: + if entityName[-1] != ";": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "named-entity-without-semicolon"}) + if (entityName[-1] != ";" and fromAttribute and + (charStack[entityLength] in asciiLetters or + charStack[entityLength] in digits or + charStack[entityLength] == "=")): + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + else: + output = entities[entityName] + self.stream.unget(charStack.pop()) + output += "".join(charStack[entityLength:]) + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-named-entity"}) + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + + if fromAttribute: + self.currentToken["data"][-1][1] += output + else: + if output in spaceCharacters: + tokenType = "SpaceCharacters" + else: + tokenType = "Characters" + self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output}) + + def processEntityInAttribute(self, allowedChar): + """This method replaces the need for "entityInAttributeValueState". + """ + self.consumeEntity(allowedChar=allowedChar, fromAttribute=True) + + def emitCurrentToken(self): + """This method is a generic handler for emitting the tags. It also sets + the state to "data" because that's what's needed after a token has been + emitted. 
+ """ + token = self.currentToken + # Add token to the queue to be yielded + if (token["type"] in tagTokenTypes): + token["name"] = token["name"].translate(asciiUpper2Lower) + if token["type"] == tokenTypes["EndTag"]: + if token["data"]: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "attributes-in-end-tag"}) + if token["selfClosing"]: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "self-closing-flag-on-end-tag"}) + self.tokenQueue.append(token) + self.state = self.dataState + + # Below are the various tokenizer states worked out. + def dataState(self): + data = self.stream.char() + if data == "&": + self.state = self.entityDataState + elif data == "<": + self.state = self.tagOpenState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\u0000"}) + elif data is EOF: + # Tokenization ends. + return False + elif data in spaceCharacters: + # Directly after emitting a token you switch back to the "data + # state". At that point spaceCharacters are important so they are + # emitted separately. + self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": + data + self.stream.charsUntil(spaceCharacters, True)}) + # No need to update lastFourChars here, since the first space will + # have already been appended to lastFourChars and will have broken + # any sequences + else: + chars = self.stream.charsUntil(("&", "<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def entityDataState(self): + self.consumeEntity() + self.state = self.dataState + return True + + def rcdataState(self): + data = self.stream.char() + if data == "&": + self.state = self.characterReferenceInRcdata + elif data == "<": + self.state = self.rcdataLessThanSignState + elif data == EOF: + # Tokenization ends. + return False + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data in spaceCharacters: + # Directly after emitting a token you switch back to the "data + # state". At that point spaceCharacters are important so they are + # emitted separately. + self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": + data + self.stream.charsUntil(spaceCharacters, True)}) + # No need to update lastFourChars here, since the first space will + # have already been appended to lastFourChars and will have broken + # any sequences + else: + chars = self.stream.charsUntil(("&", "<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def characterReferenceInRcdata(self): + self.consumeEntity() + self.state = self.rcdataState + return True + + def rawtextState(self): + data = self.stream.char() + if data == "<": + self.state = self.rawtextLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + # Tokenization ends. 
+ return False + else: + chars = self.stream.charsUntil(("<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def scriptDataState(self): + data = self.stream.char() + if data == "<": + self.state = self.scriptDataLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + # Tokenization ends. + return False + else: + chars = self.stream.charsUntil(("<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def plaintextState(self): + data = self.stream.char() + if data == EOF: + # Tokenization ends. + return False + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + self.stream.charsUntil("\u0000")}) + return True + + def tagOpenState(self): + data = self.stream.char() + if data == "!": + self.state = self.markupDeclarationOpenState + elif data == "/": + self.state = self.closeTagOpenState + elif data in asciiLetters: + self.currentToken = {"type": tokenTypes["StartTag"], + "name": data, "data": [], + "selfClosing": False, + "selfClosingAcknowledged": False} + self.state = self.tagNameState + elif data == ">": + # XXX In theory it could be something besides a tag name. But + # do we really care? + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name-but-got-right-bracket"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"}) + self.state = self.dataState + elif data == "?": + # XXX In theory it could be something besides a tag name. But + # do we really care? 
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name-but-got-question-mark"}) + self.stream.unget(data) + self.state = self.bogusCommentState + else: + # XXX + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.dataState + return True + + def closeTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.currentToken = {"type": tokenTypes["EndTag"], "name": data, + "data": [], "selfClosing": False} + self.state = self.tagNameState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-closing-tag-but-got-right-bracket"}) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-closing-tag-but-got-eof"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "": + self.emitCurrentToken() + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-tag-name"}) + self.state = self.dataState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] += "\uFFFD" + else: + self.currentToken["name"] += data + # (Don't use charsUntil here, because tag names are + # very short and it's faster to not do anything fancy) + return True + + def rcdataLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.temporaryBuffer = "" + self.state = self.rcdataEndTagOpenState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.rcdataState + return True + + def rcdataEndTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.temporaryBuffer += data + self.state = self.rcdataEndTagNameState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) + self.state = self.scriptDataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataEscapedState + elif data == EOF: + self.state = self.dataState + else: + 
self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataEscapedState + return True + + def scriptDataEscapedLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.temporaryBuffer = "" + self.state = self.scriptDataEscapedEndTagOpenState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data}) + self.temporaryBuffer = data + self.state = self.scriptDataDoubleEscapeStartState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.scriptDataEscapedState + return True + + def scriptDataEscapedEndTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.temporaryBuffer = data + self.state = self.scriptDataEscapedEndTagNameState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": ""))): + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + if self.temporaryBuffer.lower() == "script": + self.state = self.scriptDataDoubleEscapedState + else: + self.state = self.scriptDataEscapedState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.temporaryBuffer += data + else: + self.stream.unget(data) + self.state = self.scriptDataEscapedState + return True + + def scriptDataDoubleEscapedState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataDoubleEscapedDashState + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + return True + + def scriptDataDoubleEscapedDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataDoubleEscapedDashDashState + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataDoubleEscapedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapedDashDashState(self): + data = self.stream.char() + if data == "-": + 
self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) + self.state = self.scriptDataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataDoubleEscapedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapedLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"}) + self.temporaryBuffer = "" + self.state = self.scriptDataDoubleEscapeEndState + else: + self.stream.unget(data) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapeEndState(self): + data = self.stream.char() + if data in (spaceCharacters | frozenset(("/", ">"))): + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + if self.temporaryBuffer.lower() == "script": + self.state = self.scriptDataEscapedState + else: + self.state = self.scriptDataDoubleEscapedState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.temporaryBuffer += data + else: + self.stream.unget(data) + self.state = self.scriptDataDoubleEscapedState + return True + + def beforeAttributeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data in asciiLetters: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == ">": + self.emitCurrentToken() + elif data == "/": + self.state = self.selfClosingStartTagState + elif data in ("'", '"', "=", "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "invalid-character-in-attribute-name"}) + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"].append(["\uFFFD", ""]) + self.state = self.attributeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-name-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + return True + + def attributeNameState(self): + data = self.stream.char() + leavingThisState = True + emitToken = False + if data == "=": + self.state = self.beforeAttributeValueState + elif data in asciiLetters: + self.currentToken["data"][-1][0] += data +\ + self.stream.charsUntil(asciiLetters, True) + leavingThisState = False + elif data == ">": + # XXX If we emit here the attributes are converted to a dict + # without being checked and when the code below runs we error + # because data is a dict not a list + emitToken = True + elif data in spaceCharacters: + self.state = self.afterAttributeNameState + elif data == "/": + self.state = 
self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][0] += "\uFFFD" + leavingThisState = False + elif data in ("'", '"', "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "invalid-character-in-attribute-name"}) + self.currentToken["data"][-1][0] += data + leavingThisState = False + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "eof-in-attribute-name"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][0] += data + leavingThisState = False + + if leavingThisState: + # Attributes are not dropped at this stage. That happens when the + # start tag token is emitted so values can still be safely appended + # to attributes, but we do want to report the parse error in time. + self.currentToken["data"][-1][0] = ( + self.currentToken["data"][-1][0].translate(asciiUpper2Lower)) + for name, _ in self.currentToken["data"][:-1]: + if self.currentToken["data"][-1][0] == name: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "duplicate-attribute"}) + break + # XXX Fix for above XXX + if emitToken: + self.emitCurrentToken() + return True + + def afterAttributeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data == "=": + self.state = self.beforeAttributeValueState + elif data == ">": + self.emitCurrentToken() + elif data in asciiLetters: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"].append(["\uFFFD", ""]) + self.state = self.attributeNameState + elif data in ("'", '"', "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "invalid-character-after-attribute-name"}) + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-end-of-tag-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + return True + + def beforeAttributeValueState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data == "\"": + self.state = self.attributeValueDoubleQuotedState + elif data == "&": + self.state = self.attributeValueUnQuotedState + self.stream.unget(data) + elif data == "'": + self.state = self.attributeValueSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-value-but-got-right-bracket"}) + self.emitCurrentToken() + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + self.state = self.attributeValueUnQuotedState + elif data in ("=", "<", "`"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "equals-in-unquoted-attribute-value"}) + self.currentToken["data"][-1][1] += data + self.state = self.attributeValueUnQuotedState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-value-but-got-eof"}) + self.state 
= self.dataState + else: + self.currentToken["data"][-1][1] += data + self.state = self.attributeValueUnQuotedState + return True + + def attributeValueDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterAttributeValueState + elif data == "&": + self.processEntityInAttribute('"') + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-double-quote"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data +\ + self.stream.charsUntil(("\"", "&", "\u0000")) + return True + + def attributeValueSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterAttributeValueState + elif data == "&": + self.processEntityInAttribute("'") + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-single-quote"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data +\ + self.stream.charsUntil(("'", "&", "\u0000")) + return True + + def attributeValueUnQuotedState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == "&": + self.processEntityInAttribute(">") + elif data == ">": + self.emitCurrentToken() + elif data in ('"', "'", "=", "<", "`"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-in-unquoted-attribute-value"}) + self.currentToken["data"][-1][1] += data + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-no-quotes"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data + self.stream.charsUntil( + frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters) + return True + + def afterAttributeValueState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == ">": + self.emitCurrentToken() + elif data == "/": + self.state = self.selfClosingStartTagState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-EOF-after-attribute-value"}) + self.stream.unget(data) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-after-attribute-value"}) + self.stream.unget(data) + self.state = self.beforeAttributeNameState + return True + + def selfClosingStartTagState(self): + data = self.stream.char() + if data == ">": + self.currentToken["selfClosing"] = True + self.emitCurrentToken() + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "unexpected-EOF-after-solidus-in-tag"}) + self.stream.unget(data) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-after-solidus-in-tag"}) + self.stream.unget(data) + self.state = self.beforeAttributeNameState + return True + + 
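For reference, each of the tag-building states above ends by handing control to emitCurrentToken, which pushes the finished token dict onto self.tokenQueue and drops back to dataState; the __iter__ loop near the top of the class then drains that queue. A minimal sketch of driving the tokenizer this way, assuming the bundled lib directory is on sys.path (the import path, sample markup, and printing are illustrative only, not part of the vendored file):

    # Sketch only: iterate the vendored tokenizer and inspect the token
    # dicts built by the states above. The import path assumes
    # RBXLegacyDiscordBot/lib is on sys.path; the markup is an arbitrary example.
    from pip._vendor.html5lib._tokenizer import HTMLTokenizer
    from pip._vendor.html5lib.constants import tokenTypes

    markup = '<p class="intro">Hello &amp; welcome</p>'

    for token in HTMLTokenizer(markup):
        if token["type"] == tokenTypes["StartTag"]:
            # At this stage "data" is still a list of [name, value] pairs;
            # it is only turned into a dict later, by the tree-construction phase.
            print("start tag:", token["name"], token["data"])
        elif token["type"] in (tokenTypes["Characters"], tokenTypes["SpaceCharacters"]):
            print("text:", token["data"])
        elif token["type"] == tokenTypes["EndTag"]:
            print("end tag:", token["name"])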
def bogusCommentState(self): + # Make a new comment token and give it as value all the characters + # until the first > or EOF (charsUntil checks for EOF automatically) + # and emit it. + data = self.stream.charsUntil(">") + data = data.replace("\u0000", "\uFFFD") + self.tokenQueue.append( + {"type": tokenTypes["Comment"], "data": data}) + + # Eat the character directly after the bogus comment which is either a + # ">" or an EOF. + self.stream.char() + self.state = self.dataState + return True + + def markupDeclarationOpenState(self): + charStack = [self.stream.char()] + if charStack[-1] == "-": + charStack.append(self.stream.char()) + if charStack[-1] == "-": + self.currentToken = {"type": tokenTypes["Comment"], "data": ""} + self.state = self.commentStartState + return True + elif charStack[-1] in ('d', 'D'): + matched = True + for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'), + ('y', 'Y'), ('p', 'P'), ('e', 'E')): + charStack.append(self.stream.char()) + if charStack[-1] not in expected: + matched = False + break + if matched: + self.currentToken = {"type": tokenTypes["Doctype"], + "name": "", + "publicId": None, "systemId": None, + "correct": True} + self.state = self.doctypeState + return True + elif (charStack[-1] == "[" and + self.parser is not None and + self.parser.tree.openElements and + self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace): + matched = True + for expected in ["C", "D", "A", "T", "A", "["]: + charStack.append(self.stream.char()) + if charStack[-1] != expected: + matched = False + break + if matched: + self.state = self.cdataSectionState + return True + + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-dashes-or-doctype"}) + + while charStack: + self.stream.unget(charStack.pop()) + self.state = self.bogusCommentState + return True + + def commentStartState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentStartDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "incorrect-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += data + self.state = self.commentState + return True + + def commentStartDashState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "-\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "incorrect-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "-" + data + self.state = self.commentState + return True + + def commentState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": 
"invalid-codepoint"}) + self.currentToken["data"] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += data + \ + self.stream.charsUntil(("-", "\u0000")) + return True + + def commentEndDashState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "-\uFFFD" + self.state = self.commentState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-end-dash"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "-" + data + self.state = self.commentState + return True + + def commentEndState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "--\uFFFD" + self.state = self.commentState + elif data == "!": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-bang-after-double-dash-in-comment"}) + self.state = self.commentEndBangState + elif data == "-": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-dash-after-double-dash-in-comment"}) + self.currentToken["data"] += data + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-double-dash"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + # XXX + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-comment"}) + self.currentToken["data"] += "--" + data + self.state = self.commentState + return True + + def commentEndBangState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "-": + self.currentToken["data"] += "--!" + self.state = self.commentEndDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "--!\uFFFD" + self.state = self.commentState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-end-bang-state"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "--!" 
+ data + self.state = self.commentState + return True + + def doctypeState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-eof"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "need-space-after-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypeNameState + return True + + def beforeDoctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-right-bracket"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] = "\uFFFD" + self.state = self.doctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-eof"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["name"] = data + self.state = self.doctypeNameState + return True + + def doctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.state = self.afterDoctypeNameState + elif data == ">": + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] += "\uFFFD" + self.state = self.doctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype-name"}) + self.currentToken["correct"] = False + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["name"] += data + return True + + def afterDoctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.currentToken["correct"] = False + self.stream.unget(data) + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + if data in ("p", "P"): + matched = True + for expected in (("u", "U"), ("b", "B"), ("l", "L"), + ("i", "I"), ("c", "C")): + data = self.stream.char() + if data not in expected: + matched = False + break + if matched: + self.state = self.afterDoctypePublicKeywordState + return True + elif data in ("s", "S"): + matched = True + for expected in (("y", "Y"), ("s", "S"), ("t", "T"), + ("e", "E"), ("m", "M")): + data = self.stream.char() + if data not in expected: + matched = False + break + if matched: + self.state = self.afterDoctypeSystemKeywordState + return True + + # All the characters read before the current 'data' will be + # [a-zA-Z], 
so they're garbage in the bogus doctype and can be + # discarded; only the latest character might be '>' or EOF + # and needs to be ungetted + self.stream.unget(data) + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-space-or-right-bracket-in-doctype", "datavars": + {"data": data}}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + + return True + + def afterDoctypePublicKeywordState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypePublicIdentifierState + elif data in ("'", '"'): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypePublicIdentifierState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.stream.unget(data) + self.state = self.beforeDoctypePublicIdentifierState + return True + + def beforeDoctypePublicIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == "\"": + self.currentToken["publicId"] = "" + self.state = self.doctypePublicIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["publicId"] = "" + self.state = self.doctypePublicIdentifierSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def doctypePublicIdentifierDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterDoctypePublicIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["publicId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["publicId"] += data + return True + + def doctypePublicIdentifierSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterDoctypePublicIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["publicId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": 
tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["publicId"] += data + return True + + def afterDoctypePublicIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.betweenDoctypePublicAndSystemIdentifiersState + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == '"': + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def betweenDoctypePublicAndSystemIdentifiersState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == '"': + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def afterDoctypeSystemKeywordState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypeSystemIdentifierState + elif data in ("'", '"'): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypeSystemIdentifierState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.stream.unget(data) + self.state = self.beforeDoctypeSystemIdentifierState + return True + + def beforeDoctypeSystemIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == "\"": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def doctypeSystemIdentifierDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterDoctypeSystemIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["systemId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["systemId"] += data + return True + + def doctypeSystemIdentifierSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterDoctypeSystemIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["systemId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["systemId"] += data + return True + + def afterDoctypeSystemIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.state = self.bogusDoctypeState + return True + + def bogusDoctypeState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + # XXX EMIT + self.stream.unget(data) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + pass + return True + + def cdataSectionState(self): + data = [] + while True: + data.append(self.stream.charsUntil("]")) + data.append(self.stream.charsUntil(">")) + char = self.stream.char() + if char == EOF: + break + else: + assert char == ">" + if data[-1][-2:] == "]]": + data[-1] = data[-1][:-2] + break + else: + data.append(char) + + data = "".join(data) # pylint:disable=redefined-variable-type + # Deal with null here rather than in the parser + nullCount = data.count("\u0000") + if nullCount > 0: + for _ in range(nullCount): + self.tokenQueue.append({"type": tokenTypes["ParseError"], 
+ "data": "invalid-codepoint"}) + data = data.replace("\u0000", "\uFFFD") + if data: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": data}) + self.state = self.dataState + return True diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/__init__.py new file mode 100644 index 0000000..a5ba4bf --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/__init__.py @@ -0,0 +1,14 @@ +from __future__ import absolute_import, division, unicode_literals + +from .py import Trie as PyTrie + +Trie = PyTrie + +# pylint:disable=wrong-import-position +try: + from .datrie import Trie as DATrie +except ImportError: + pass +else: + Trie = DATrie +# pylint:enable=wrong-import-position diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/_base.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/_base.py new file mode 100644 index 0000000..25eece4 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/_base.py @@ -0,0 +1,38 @@ +from __future__ import absolute_import, division, unicode_literals + +from collections import Mapping + + +class Trie(Mapping): + """Abstract base class for tries""" + + def keys(self, prefix=None): + # pylint:disable=arguments-differ + keys = super(Trie, self).keys() + + if prefix is None: + return set(keys) + + # Python 2.6: no set comprehensions + return set([x for x in keys if x.startswith(prefix)]) + + def has_keys_with_prefix(self, prefix): + for key in self.keys(): + if key.startswith(prefix): + return True + + return False + + def longest_prefix(self, prefix): + if prefix in self: + return prefix + + for i in range(1, len(prefix) + 1): + if prefix[:-i] in self: + return prefix[:-i] + + raise KeyError(prefix) + + def longest_prefix_item(self, prefix): + lprefix = self.longest_prefix(prefix) + return (lprefix, self[lprefix]) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/datrie.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/datrie.py new file mode 100644 index 0000000..e2e5f86 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/datrie.py @@ -0,0 +1,44 @@ +from __future__ import absolute_import, division, unicode_literals + +from datrie import Trie as DATrie +from pip._vendor.six import text_type + +from ._base import Trie as ABCTrie + + +class Trie(ABCTrie): + def __init__(self, data): + chars = set() + for key in data.keys(): + if not isinstance(key, text_type): + raise TypeError("All keys must be strings") + for char in key: + chars.add(char) + + self._data = DATrie("".join(chars)) + for key, value in data.items(): + self._data[key] = value + + def __contains__(self, key): + return key in self._data + + def __len__(self): + return len(self._data) + + def __iter__(self): + raise NotImplementedError() + + def __getitem__(self, key): + return self._data[key] + + def keys(self, prefix=None): + return self._data.keys(prefix) + + def has_keys_with_prefix(self, prefix): + return self._data.has_keys_with_prefix(prefix) + + def longest_prefix(self, prefix): + return self._data.longest_prefix(prefix) + + def longest_prefix_item(self, prefix): + return self._data.longest_prefix_item(prefix) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/py.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/py.py new file mode 100644 index 0000000..c178b21 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_trie/py.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import, division, 
unicode_literals +from pip._vendor.six import text_type + +from bisect import bisect_left + +from ._base import Trie as ABCTrie + + +class Trie(ABCTrie): + def __init__(self, data): + if not all(isinstance(x, text_type) for x in data.keys()): + raise TypeError("All keys must be strings") + + self._data = data + self._keys = sorted(data.keys()) + self._cachestr = "" + self._cachepoints = (0, len(data)) + + def __contains__(self, key): + return key in self._data + + def __len__(self): + return len(self._data) + + def __iter__(self): + return iter(self._data) + + def __getitem__(self, key): + return self._data[key] + + def keys(self, prefix=None): + if prefix is None or prefix == "" or not self._keys: + return set(self._keys) + + if prefix.startswith(self._cachestr): + lo, hi = self._cachepoints + start = i = bisect_left(self._keys, prefix, lo, hi) + else: + start = i = bisect_left(self._keys, prefix) + + keys = set() + if start == len(self._keys): + return keys + + while self._keys[i].startswith(prefix): + keys.add(self._keys[i]) + i += 1 + + self._cachestr = prefix + self._cachepoints = (start, i) + + return keys + + def has_keys_with_prefix(self, prefix): + if prefix in self._data: + return True + + if prefix.startswith(self._cachestr): + lo, hi = self._cachepoints + i = bisect_left(self._keys, prefix, lo, hi) + else: + i = bisect_left(self._keys, prefix) + + if i == len(self._keys): + return False + + return self._keys[i].startswith(prefix) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_utils.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_utils.py new file mode 100644 index 0000000..55d6747 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/_utils.py @@ -0,0 +1,127 @@ +from __future__ import absolute_import, division, unicode_literals + +import sys +from types import ModuleType + +from pip._vendor.six import text_type + +try: + import xml.etree.cElementTree as default_etree +except ImportError: + import xml.etree.ElementTree as default_etree + + +__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", + "surrogatePairToCodepoint", "moduleFactoryFactory", + "supports_lone_surrogates", "PY27"] + + +PY27 = sys.version_info[0] == 2 and sys.version_info[1] >= 7 + +# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be +# caught by the below test. In general this would be any platform +# using UTF-16 as its encoding of unicode strings, such as +# Jython. This is because UTF-16 itself is based on the use of such +# surrogates, and there is no mechanism to further escape such +# escapes. +try: + _x = eval('"\\uD800"') # pylint:disable=eval-used + if not isinstance(_x, text_type): + # We need this with u"" because of http://bugs.jython.org/issue2039 + _x = eval('u"\\uD800"') # pylint:disable=eval-used + assert isinstance(_x, text_type) +except: # pylint:disable=bare-except + supports_lone_surrogates = False +else: + supports_lone_surrogates = True + + +class MethodDispatcher(dict): + """Dict with 2 special properties: + + On initiation, keys that are lists, sets or tuples are converted to + multiple keys so accessing any one of the items in the original + list-like object returns the matching value + + md = MethodDispatcher({("foo", "bar"):"baz"}) + md["foo"] == "baz" + + A default value which can be set through the default attribute. + """ + + def __init__(self, items=()): + # Using _dictEntries instead of directly assigning to self is about + # twice as fast. Please do careful performance testing before changing + # anything here. 
+ _dictEntries = [] + for name, value in items: + if isinstance(name, (list, tuple, frozenset, set)): + for item in name: + _dictEntries.append((item, value)) + else: + _dictEntries.append((name, value)) + dict.__init__(self, _dictEntries) + assert len(self) == len(_dictEntries) + self.default = None + + def __getitem__(self, key): + return dict.get(self, key, self.default) + + +# Some utility functions to deal with weirdness around UCS2 vs UCS4 +# python builds + +def isSurrogatePair(data): + return (len(data) == 2 and + ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and + ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF) + + +def surrogatePairToCodepoint(data): + char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 + + (ord(data[1]) - 0xDC00)) + return char_val + +# Module Factory Factory (no, this isn't Java, I know) +# Here to stop this being duplicated all over the place. + + +def moduleFactoryFactory(factory): + moduleCache = {} + + def moduleFactory(baseModule, *args, **kwargs): + if isinstance(ModuleType.__name__, type("")): + name = "_%s_factory" % baseModule.__name__ + else: + name = b"_%s_factory" % baseModule.__name__ + + kwargs_tuple = tuple(kwargs.items()) + + try: + return moduleCache[name][args][kwargs_tuple] + except KeyError: + mod = ModuleType(name) + objs = factory(baseModule, *args, **kwargs) + mod.__dict__.update(objs) + if "name" not in moduleCache: + moduleCache[name] = {} + if "args" not in moduleCache[name]: + moduleCache[name][args] = {} + if "kwargs" not in moduleCache[name][args]: + moduleCache[name][args][kwargs_tuple] = {} + moduleCache[name][args][kwargs_tuple] = mod + return mod + + return moduleFactory + + +def memoize(func): + cache = {} + + def wrapped(*args, **kwargs): + key = (tuple(args), tuple(kwargs.items())) + if key not in cache: + cache[key] = func(*args, **kwargs) + return cache[key] + + return wrapped diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/constants.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/constants.py new file mode 100644 index 0000000..9e7541d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/constants.py @@ -0,0 +1,2945 @@ +from __future__ import absolute_import, division, unicode_literals + +import string + +EOF = None + +E = { + "null-character": + "Null character in input stream, replaced with U+FFFD.", + "invalid-codepoint": + "Invalid codepoint in stream.", + "incorrectly-placed-solidus": + "Solidus (/) incorrectly placed in tag.", + "incorrect-cr-newline-entity": + "Incorrect CR newline entity, replaced with LF.", + "illegal-windows-1252-entity": + "Entity used with illegal number (windows-1252 reference).", + "cant-convert-numeric-entity": + "Numeric entity couldn't be converted to character " + "(codepoint U+%(charAsInt)08x).", + "illegal-codepoint-for-numeric-entity": + "Numeric entity represents an illegal codepoint: " + "U+%(charAsInt)08x.", + "numeric-entity-without-semicolon": + "Numeric entity didn't end with ';'.", + "expected-numeric-entity-but-got-eof": + "Numeric entity expected. Got end of file instead.", + "expected-numeric-entity": + "Numeric entity expected but none found.", + "named-entity-without-semicolon": + "Named entity didn't end with ';'.", + "expected-named-entity": + "Named entity expected. Got none.", + "attributes-in-end-tag": + "End tag contains unexpected attributes.", + 'self-closing-flag-on-end-tag': + "End tag contains unexpected self-closing flag.", + "expected-tag-name-but-got-right-bracket": + "Expected tag name. 
Got '>' instead.", + "expected-tag-name-but-got-question-mark": + "Expected tag name. Got '?' instead. (HTML doesn't " + "support processing instructions.)", + "expected-tag-name": + "Expected tag name. Got something else instead", + "expected-closing-tag-but-got-right-bracket": + "Expected closing tag. Got '>' instead. Ignoring ''.", + "expected-closing-tag-but-got-eof": + "Expected closing tag. Unexpected end of file.", + "expected-closing-tag-but-got-char": + "Expected closing tag. Unexpected character '%(data)s' found.", + "eof-in-tag-name": + "Unexpected end of file in the tag name.", + "expected-attribute-name-but-got-eof": + "Unexpected end of file. Expected attribute name instead.", + "eof-in-attribute-name": + "Unexpected end of file in attribute name.", + "invalid-character-in-attribute-name": + "Invalid character in attribute name", + "duplicate-attribute": + "Dropped duplicate attribute on tag.", + "expected-end-of-tag-name-but-got-eof": + "Unexpected end of file. Expected = or end of tag.", + "expected-attribute-value-but-got-eof": + "Unexpected end of file. Expected attribute value.", + "expected-attribute-value-but-got-right-bracket": + "Expected attribute value. Got '>' instead.", + 'equals-in-unquoted-attribute-value': + "Unexpected = in unquoted attribute", + 'unexpected-character-in-unquoted-attribute-value': + "Unexpected character in unquoted attribute", + "invalid-character-after-attribute-name": + "Unexpected character after attribute name.", + "unexpected-character-after-attribute-value": + "Unexpected character after attribute value.", + "eof-in-attribute-value-double-quote": + "Unexpected end of file in attribute value (\").", + "eof-in-attribute-value-single-quote": + "Unexpected end of file in attribute value (').", + "eof-in-attribute-value-no-quotes": + "Unexpected end of file in attribute value.", + "unexpected-EOF-after-solidus-in-tag": + "Unexpected end of file in tag. Expected >", + "unexpected-character-after-solidus-in-tag": + "Unexpected character after / in tag. Expected >", + "expected-dashes-or-doctype": + "Expected '--' or 'DOCTYPE'. Not found.", + "unexpected-bang-after-double-dash-in-comment": + "Unexpected ! after -- in comment", + "unexpected-space-after-double-dash-in-comment": + "Unexpected space after -- in comment", + "incorrect-comment": + "Incorrect comment.", + "eof-in-comment": + "Unexpected end of file in comment.", + "eof-in-comment-end-dash": + "Unexpected end of file in comment (-)", + "unexpected-dash-after-double-dash-in-comment": + "Unexpected '-' after '--' found in comment.", + "eof-in-comment-double-dash": + "Unexpected end of file in comment (--).", + "eof-in-comment-end-space-state": + "Unexpected end of file in comment.", + "eof-in-comment-end-bang-state": + "Unexpected end of file in comment.", + "unexpected-char-in-comment": + "Unexpected character in comment found.", + "need-space-after-doctype": + "No space after literal string 'DOCTYPE'.", + "expected-doctype-name-but-got-right-bracket": + "Unexpected > character. Expected DOCTYPE name.", + "expected-doctype-name-but-got-eof": + "Unexpected end of file. Expected DOCTYPE name.", + "eof-in-doctype-name": + "Unexpected end of file in DOCTYPE name.", + "eof-in-doctype": + "Unexpected end of file in DOCTYPE.", + "expected-space-or-right-bracket-in-doctype": + "Expected space or '>'. 
Got '%(data)s'", + "unexpected-end-of-doctype": + "Unexpected end of DOCTYPE.", + "unexpected-char-in-doctype": + "Unexpected character in DOCTYPE.", + "eof-in-innerhtml": + "XXX innerHTML EOF", + "unexpected-doctype": + "Unexpected DOCTYPE. Ignored.", + "non-html-root": + "html needs to be the first start tag.", + "expected-doctype-but-got-eof": + "Unexpected End of file. Expected DOCTYPE.", + "unknown-doctype": + "Erroneous DOCTYPE.", + "expected-doctype-but-got-chars": + "Unexpected non-space characters. Expected DOCTYPE.", + "expected-doctype-but-got-start-tag": + "Unexpected start tag (%(name)s). Expected DOCTYPE.", + "expected-doctype-but-got-end-tag": + "Unexpected end tag (%(name)s). Expected DOCTYPE.", + "end-tag-after-implied-root": + "Unexpected end tag (%(name)s) after the (implied) root element.", + "expected-named-closing-tag-but-got-eof": + "Unexpected end of file. Expected end tag (%(name)s).", + "two-heads-are-not-better-than-one": + "Unexpected start tag head in existing head. Ignored.", + "unexpected-end-tag": + "Unexpected end tag (%(name)s). Ignored.", + "unexpected-start-tag-out-of-my-head": + "Unexpected start tag (%(name)s) that can be in head. Moved.", + "unexpected-start-tag": + "Unexpected start tag (%(name)s).", + "missing-end-tag": + "Missing end tag (%(name)s).", + "missing-end-tags": + "Missing end tags (%(name)s).", + "unexpected-start-tag-implies-end-tag": + "Unexpected start tag (%(startName)s) " + "implies end tag (%(endName)s).", + "unexpected-start-tag-treated-as": + "Unexpected start tag (%(originalName)s). Treated as %(newName)s.", + "deprecated-tag": + "Unexpected start tag %(name)s. Don't use it!", + "unexpected-start-tag-ignored": + "Unexpected start tag %(name)s. Ignored.", + "expected-one-end-tag-but-got-another": + "Unexpected end tag (%(gotName)s). " + "Missing end tag (%(expectedName)s).", + "end-tag-too-early": + "End tag (%(name)s) seen too early. Expected other end tag.", + "end-tag-too-early-named": + "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).", + "end-tag-too-early-ignored": + "End tag (%(name)s) seen too early. Ignored.", + "adoption-agency-1.1": + "End tag (%(name)s) violates step 1, " + "paragraph 1 of the adoption agency algorithm.", + "adoption-agency-1.2": + "End tag (%(name)s) violates step 1, " + "paragraph 2 of the adoption agency algorithm.", + "adoption-agency-1.3": + "End tag (%(name)s) violates step 1, " + "paragraph 3 of the adoption agency algorithm.", + "adoption-agency-4.4": + "End tag (%(name)s) violates step 4, " + "paragraph 4 of the adoption agency algorithm.", + "unexpected-end-tag-treated-as": + "Unexpected end tag (%(originalName)s). 
Treated as %(newName)s.", + "no-end-tag": + "This element (%(name)s) has no end tag.", + "unexpected-implied-end-tag-in-table": + "Unexpected implied end tag (%(name)s) in the table phase.", + "unexpected-implied-end-tag-in-table-body": + "Unexpected implied end tag (%(name)s) in the table body phase.", + "unexpected-char-implies-table-voodoo": + "Unexpected non-space characters in " + "table context caused voodoo mode.", + "unexpected-hidden-input-in-table": + "Unexpected input with type hidden in table context.", + "unexpected-form-in-table": + "Unexpected form in table context.", + "unexpected-start-tag-implies-table-voodoo": + "Unexpected start tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-end-tag-implies-table-voodoo": + "Unexpected end tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-cell-in-table-body": + "Unexpected table cell start tag (%(name)s) " + "in the table body phase.", + "unexpected-cell-end-tag": + "Got table cell end tag (%(name)s) " + "while required end tags are missing.", + "unexpected-end-tag-in-table-body": + "Unexpected end tag (%(name)s) in the table body phase. Ignored.", + "unexpected-implied-end-tag-in-table-row": + "Unexpected implied end tag (%(name)s) in the table row phase.", + "unexpected-end-tag-in-table-row": + "Unexpected end tag (%(name)s) in the table row phase. Ignored.", + "unexpected-select-in-select": + "Unexpected select start tag in the select phase " + "treated as select end tag.", + "unexpected-input-in-select": + "Unexpected input start tag in the select phase.", + "unexpected-start-tag-in-select": + "Unexpected start tag token (%(name)s in the select phase. " + "Ignored.", + "unexpected-end-tag-in-select": + "Unexpected end tag (%(name)s) in the select phase. Ignored.", + "unexpected-table-element-start-tag-in-select-in-table": + "Unexpected table element start tag (%(name)s) in the select in table phase.", + "unexpected-table-element-end-tag-in-select-in-table": + "Unexpected table element end tag (%(name)s) in the select in table phase.", + "unexpected-char-after-body": + "Unexpected non-space characters in the after body phase.", + "unexpected-start-tag-after-body": + "Unexpected start tag token (%(name)s)" + " in the after body phase.", + "unexpected-end-tag-after-body": + "Unexpected end tag token (%(name)s)" + " in the after body phase.", + "unexpected-char-in-frameset": + "Unexpected characters in the frameset phase. Characters ignored.", + "unexpected-start-tag-in-frameset": + "Unexpected start tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-frameset-in-frameset-innerhtml": + "Unexpected end tag token (frameset) " + "in the frameset phase (innerHTML).", + "unexpected-end-tag-in-frameset": + "Unexpected end tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-char-after-frameset": + "Unexpected non-space characters in the " + "after frameset phase. Ignored.", + "unexpected-start-tag-after-frameset": + "Unexpected start tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-frameset": + "Unexpected end tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-body-innerhtml": + "Unexpected end tag after body(innerHtml)", + "expected-eof-but-got-char": + "Unexpected non-space characters. Expected end of file.", + "expected-eof-but-got-start-tag": + "Unexpected start tag (%(name)s)" + ". 
Expected end of file.", + "expected-eof-but-got-end-tag": + "Unexpected end tag (%(name)s)" + ". Expected end of file.", + "eof-in-table": + "Unexpected end of file. Expected table content.", + "eof-in-select": + "Unexpected end of file. Expected select content.", + "eof-in-frameset": + "Unexpected end of file. Expected frameset content.", + "eof-in-script-in-script": + "Unexpected end of file. Expected script content.", + "eof-in-foreign-lands": + "Unexpected end of file. Expected foreign content", + "non-void-element-with-trailing-solidus": + "Trailing solidus not allowed on element %(name)s", + "unexpected-html-element-in-foreign-content": + "Element %(name)s not allowed in a non-html context", + "unexpected-end-tag-before-html": + "Unexpected end tag (%(name)s) before html.", + "unexpected-inhead-noscript-tag": + "Element %(name)s not allowed in a inhead-noscript context", + "eof-in-head-noscript": + "Unexpected end of file. Expected inhead-noscript content", + "char-in-head-noscript": + "Unexpected non-space character. Expected inhead-noscript content", + "XXX-undefined-error": + "Undefined error (this sucks and should be fixed)", +} + +namespaces = { + "html": "http://www.w3.org/1999/xhtml", + "mathml": "http://www.w3.org/1998/Math/MathML", + "svg": "http://www.w3.org/2000/svg", + "xlink": "http://www.w3.org/1999/xlink", + "xml": "http://www.w3.org/XML/1998/namespace", + "xmlns": "http://www.w3.org/2000/xmlns/" +} + +scopingElements = frozenset([ + (namespaces["html"], "applet"), + (namespaces["html"], "caption"), + (namespaces["html"], "html"), + (namespaces["html"], "marquee"), + (namespaces["html"], "object"), + (namespaces["html"], "table"), + (namespaces["html"], "td"), + (namespaces["html"], "th"), + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext"), + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title"), +]) + +formattingElements = frozenset([ + (namespaces["html"], "a"), + (namespaces["html"], "b"), + (namespaces["html"], "big"), + (namespaces["html"], "code"), + (namespaces["html"], "em"), + (namespaces["html"], "font"), + (namespaces["html"], "i"), + (namespaces["html"], "nobr"), + (namespaces["html"], "s"), + (namespaces["html"], "small"), + (namespaces["html"], "strike"), + (namespaces["html"], "strong"), + (namespaces["html"], "tt"), + (namespaces["html"], "u") +]) + +specialElements = frozenset([ + (namespaces["html"], "address"), + (namespaces["html"], "applet"), + (namespaces["html"], "area"), + (namespaces["html"], "article"), + (namespaces["html"], "aside"), + (namespaces["html"], "base"), + (namespaces["html"], "basefont"), + (namespaces["html"], "bgsound"), + (namespaces["html"], "blockquote"), + (namespaces["html"], "body"), + (namespaces["html"], "br"), + (namespaces["html"], "button"), + (namespaces["html"], "caption"), + (namespaces["html"], "center"), + (namespaces["html"], "col"), + (namespaces["html"], "colgroup"), + (namespaces["html"], "command"), + (namespaces["html"], "dd"), + (namespaces["html"], "details"), + (namespaces["html"], "dir"), + (namespaces["html"], "div"), + (namespaces["html"], "dl"), + (namespaces["html"], "dt"), + (namespaces["html"], "embed"), + (namespaces["html"], "fieldset"), + (namespaces["html"], "figure"), + (namespaces["html"], "footer"), + (namespaces["html"], "form"), + (namespaces["html"], "frame"), + (namespaces["html"], 
"frameset"), + (namespaces["html"], "h1"), + (namespaces["html"], "h2"), + (namespaces["html"], "h3"), + (namespaces["html"], "h4"), + (namespaces["html"], "h5"), + (namespaces["html"], "h6"), + (namespaces["html"], "head"), + (namespaces["html"], "header"), + (namespaces["html"], "hr"), + (namespaces["html"], "html"), + (namespaces["html"], "iframe"), + # Note that image is commented out in the spec as "this isn't an + # element that can end up on the stack, so it doesn't matter," + (namespaces["html"], "image"), + (namespaces["html"], "img"), + (namespaces["html"], "input"), + (namespaces["html"], "isindex"), + (namespaces["html"], "li"), + (namespaces["html"], "link"), + (namespaces["html"], "listing"), + (namespaces["html"], "marquee"), + (namespaces["html"], "menu"), + (namespaces["html"], "meta"), + (namespaces["html"], "nav"), + (namespaces["html"], "noembed"), + (namespaces["html"], "noframes"), + (namespaces["html"], "noscript"), + (namespaces["html"], "object"), + (namespaces["html"], "ol"), + (namespaces["html"], "p"), + (namespaces["html"], "param"), + (namespaces["html"], "plaintext"), + (namespaces["html"], "pre"), + (namespaces["html"], "script"), + (namespaces["html"], "section"), + (namespaces["html"], "select"), + (namespaces["html"], "style"), + (namespaces["html"], "table"), + (namespaces["html"], "tbody"), + (namespaces["html"], "td"), + (namespaces["html"], "textarea"), + (namespaces["html"], "tfoot"), + (namespaces["html"], "th"), + (namespaces["html"], "thead"), + (namespaces["html"], "title"), + (namespaces["html"], "tr"), + (namespaces["html"], "ul"), + (namespaces["html"], "wbr"), + (namespaces["html"], "xmp"), + (namespaces["svg"], "foreignObject") +]) + +htmlIntegrationPointElements = frozenset([ + (namespaces["mathml"], "annotaion-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title") +]) + +mathmlTextIntegrationPointElements = frozenset([ + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext") +]) + +adjustSVGAttributes = { + "attributename": "attributeName", + "attributetype": "attributeType", + "basefrequency": "baseFrequency", + "baseprofile": "baseProfile", + "calcmode": "calcMode", + "clippathunits": "clipPathUnits", + "contentscripttype": "contentScriptType", + "contentstyletype": "contentStyleType", + "diffuseconstant": "diffuseConstant", + "edgemode": "edgeMode", + "externalresourcesrequired": "externalResourcesRequired", + "filterres": "filterRes", + "filterunits": "filterUnits", + "glyphref": "glyphRef", + "gradienttransform": "gradientTransform", + "gradientunits": "gradientUnits", + "kernelmatrix": "kernelMatrix", + "kernelunitlength": "kernelUnitLength", + "keypoints": "keyPoints", + "keysplines": "keySplines", + "keytimes": "keyTimes", + "lengthadjust": "lengthAdjust", + "limitingconeangle": "limitingConeAngle", + "markerheight": "markerHeight", + "markerunits": "markerUnits", + "markerwidth": "markerWidth", + "maskcontentunits": "maskContentUnits", + "maskunits": "maskUnits", + "numoctaves": "numOctaves", + "pathlength": "pathLength", + "patterncontentunits": "patternContentUnits", + "patterntransform": "patternTransform", + "patternunits": "patternUnits", + "pointsatx": "pointsAtX", + "pointsaty": "pointsAtY", + "pointsatz": "pointsAtZ", + "preservealpha": "preserveAlpha", + "preserveaspectratio": "preserveAspectRatio", + "primitiveunits": "primitiveUnits", + "refx": "refX", + "refy": 
"refY", + "repeatcount": "repeatCount", + "repeatdur": "repeatDur", + "requiredextensions": "requiredExtensions", + "requiredfeatures": "requiredFeatures", + "specularconstant": "specularConstant", + "specularexponent": "specularExponent", + "spreadmethod": "spreadMethod", + "startoffset": "startOffset", + "stddeviation": "stdDeviation", + "stitchtiles": "stitchTiles", + "surfacescale": "surfaceScale", + "systemlanguage": "systemLanguage", + "tablevalues": "tableValues", + "targetx": "targetX", + "targety": "targetY", + "textlength": "textLength", + "viewbox": "viewBox", + "viewtarget": "viewTarget", + "xchannelselector": "xChannelSelector", + "ychannelselector": "yChannelSelector", + "zoomandpan": "zoomAndPan" +} + +adjustMathMLAttributes = {"definitionurl": "definitionURL"} + +adjustForeignAttributes = { + "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]), + "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]), + "xlink:href": ("xlink", "href", namespaces["xlink"]), + "xlink:role": ("xlink", "role", namespaces["xlink"]), + "xlink:show": ("xlink", "show", namespaces["xlink"]), + "xlink:title": ("xlink", "title", namespaces["xlink"]), + "xlink:type": ("xlink", "type", namespaces["xlink"]), + "xml:base": ("xml", "base", namespaces["xml"]), + "xml:lang": ("xml", "lang", namespaces["xml"]), + "xml:space": ("xml", "space", namespaces["xml"]), + "xmlns": (None, "xmlns", namespaces["xmlns"]), + "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) +} + +unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in + adjustForeignAttributes.items()]) + +spaceCharacters = frozenset([ + "\t", + "\n", + "\u000C", + " ", + "\r" +]) + +tableInsertModeElements = frozenset([ + "table", + "tbody", + "tfoot", + "thead", + "tr" +]) + +asciiLowercase = frozenset(string.ascii_lowercase) +asciiUppercase = frozenset(string.ascii_uppercase) +asciiLetters = frozenset(string.ascii_letters) +digits = frozenset(string.digits) +hexDigits = frozenset(string.hexdigits) + +asciiUpper2Lower = dict([(ord(c), ord(c.lower())) + for c in string.ascii_uppercase]) + +# Heading elements need to be ordered +headingElements = ( + "h1", + "h2", + "h3", + "h4", + "h5", + "h6" +) + +voidElements = frozenset([ + "base", + "command", + "event-source", + "link", + "meta", + "hr", + "br", + "img", + "embed", + "param", + "area", + "col", + "input", + "source", + "track" +]) + +cdataElements = frozenset(['title', 'textarea']) + +rcdataElements = frozenset([ + 'style', + 'script', + 'xmp', + 'iframe', + 'noembed', + 'noframes', + 'noscript' +]) + +booleanAttributes = { + "": frozenset(["irrelevant"]), + "style": frozenset(["scoped"]), + "img": frozenset(["ismap"]), + "audio": frozenset(["autoplay", "controls"]), + "video": frozenset(["autoplay", "controls"]), + "script": frozenset(["defer", "async"]), + "details": frozenset(["open"]), + "datagrid": frozenset(["multiple", "disabled"]), + "command": frozenset(["hidden", "disabled", "checked", "default"]), + "hr": frozenset(["noshade"]), + "menu": frozenset(["autosubmit"]), + "fieldset": frozenset(["disabled", "readonly"]), + "option": frozenset(["disabled", "readonly", "selected"]), + "optgroup": frozenset(["disabled", "readonly"]), + "button": frozenset(["disabled", "autofocus"]), + "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]), + "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]), + "output": frozenset(["disabled", "readonly"]), +} + +# entitiesWindows1252 has to be _ordered_ and 
needs to have an index. It +# therefore can't be a frozenset. +entitiesWindows1252 = ( + 8364, # 0x80 0x20AC EURO SIGN + 65533, # 0x81 UNDEFINED + 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK + 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK + 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK + 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS + 8224, # 0x86 0x2020 DAGGER + 8225, # 0x87 0x2021 DOUBLE DAGGER + 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT + 8240, # 0x89 0x2030 PER MILLE SIGN + 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON + 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK + 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE + 65533, # 0x8D UNDEFINED + 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON + 65533, # 0x8F UNDEFINED + 65533, # 0x90 UNDEFINED + 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK + 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK + 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK + 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK + 8226, # 0x95 0x2022 BULLET + 8211, # 0x96 0x2013 EN DASH + 8212, # 0x97 0x2014 EM DASH + 732, # 0x98 0x02DC SMALL TILDE + 8482, # 0x99 0x2122 TRADE MARK SIGN + 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON + 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK + 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE + 65533, # 0x9D UNDEFINED + 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON + 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS +) + +xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;']) + +entities = { + "AElig": "\xc6", + "AElig;": "\xc6", + "AMP": "&", + "AMP;": "&", + "Aacute": "\xc1", + "Aacute;": "\xc1", + "Abreve;": "\u0102", + "Acirc": "\xc2", + "Acirc;": "\xc2", + "Acy;": "\u0410", + "Afr;": "\U0001d504", + "Agrave": "\xc0", + "Agrave;": "\xc0", + "Alpha;": "\u0391", + "Amacr;": "\u0100", + "And;": "\u2a53", + "Aogon;": "\u0104", + "Aopf;": "\U0001d538", + "ApplyFunction;": "\u2061", + "Aring": "\xc5", + "Aring;": "\xc5", + "Ascr;": "\U0001d49c", + "Assign;": "\u2254", + "Atilde": "\xc3", + "Atilde;": "\xc3", + "Auml": "\xc4", + "Auml;": "\xc4", + "Backslash;": "\u2216", + "Barv;": "\u2ae7", + "Barwed;": "\u2306", + "Bcy;": "\u0411", + "Because;": "\u2235", + "Bernoullis;": "\u212c", + "Beta;": "\u0392", + "Bfr;": "\U0001d505", + "Bopf;": "\U0001d539", + "Breve;": "\u02d8", + "Bscr;": "\u212c", + "Bumpeq;": "\u224e", + "CHcy;": "\u0427", + "COPY": "\xa9", + "COPY;": "\xa9", + "Cacute;": "\u0106", + "Cap;": "\u22d2", + "CapitalDifferentialD;": "\u2145", + "Cayleys;": "\u212d", + "Ccaron;": "\u010c", + "Ccedil": "\xc7", + "Ccedil;": "\xc7", + "Ccirc;": "\u0108", + "Cconint;": "\u2230", + "Cdot;": "\u010a", + "Cedilla;": "\xb8", + "CenterDot;": "\xb7", + "Cfr;": "\u212d", + "Chi;": "\u03a7", + "CircleDot;": "\u2299", + "CircleMinus;": "\u2296", + "CirclePlus;": "\u2295", + "CircleTimes;": "\u2297", + "ClockwiseContourIntegral;": "\u2232", + "CloseCurlyDoubleQuote;": "\u201d", + "CloseCurlyQuote;": "\u2019", + "Colon;": "\u2237", + "Colone;": "\u2a74", + "Congruent;": "\u2261", + "Conint;": "\u222f", + "ContourIntegral;": "\u222e", + "Copf;": "\u2102", + "Coproduct;": "\u2210", + "CounterClockwiseContourIntegral;": "\u2233", + "Cross;": "\u2a2f", + "Cscr;": "\U0001d49e", + "Cup;": "\u22d3", + "CupCap;": "\u224d", + "DD;": "\u2145", + "DDotrahd;": "\u2911", + "DJcy;": "\u0402", + "DScy;": "\u0405", + "DZcy;": "\u040f", + "Dagger;": "\u2021", + "Darr;": "\u21a1", + "Dashv;": "\u2ae4", + "Dcaron;": "\u010e", + "Dcy;": "\u0414", + "Del;": "\u2207", + "Delta;": "\u0394", + 
"Dfr;": "\U0001d507", + "DiacriticalAcute;": "\xb4", + "DiacriticalDot;": "\u02d9", + "DiacriticalDoubleAcute;": "\u02dd", + "DiacriticalGrave;": "`", + "DiacriticalTilde;": "\u02dc", + "Diamond;": "\u22c4", + "DifferentialD;": "\u2146", + "Dopf;": "\U0001d53b", + "Dot;": "\xa8", + "DotDot;": "\u20dc", + "DotEqual;": "\u2250", + "DoubleContourIntegral;": "\u222f", + "DoubleDot;": "\xa8", + "DoubleDownArrow;": "\u21d3", + "DoubleLeftArrow;": "\u21d0", + "DoubleLeftRightArrow;": "\u21d4", + "DoubleLeftTee;": "\u2ae4", + "DoubleLongLeftArrow;": "\u27f8", + "DoubleLongLeftRightArrow;": "\u27fa", + "DoubleLongRightArrow;": "\u27f9", + "DoubleRightArrow;": "\u21d2", + "DoubleRightTee;": "\u22a8", + "DoubleUpArrow;": "\u21d1", + "DoubleUpDownArrow;": "\u21d5", + "DoubleVerticalBar;": "\u2225", + "DownArrow;": "\u2193", + "DownArrowBar;": "\u2913", + "DownArrowUpArrow;": "\u21f5", + "DownBreve;": "\u0311", + "DownLeftRightVector;": "\u2950", + "DownLeftTeeVector;": "\u295e", + "DownLeftVector;": "\u21bd", + "DownLeftVectorBar;": "\u2956", + "DownRightTeeVector;": "\u295f", + "DownRightVector;": "\u21c1", + "DownRightVectorBar;": "\u2957", + "DownTee;": "\u22a4", + "DownTeeArrow;": "\u21a7", + "Downarrow;": "\u21d3", + "Dscr;": "\U0001d49f", + "Dstrok;": "\u0110", + "ENG;": "\u014a", + "ETH": "\xd0", + "ETH;": "\xd0", + "Eacute": "\xc9", + "Eacute;": "\xc9", + "Ecaron;": "\u011a", + "Ecirc": "\xca", + "Ecirc;": "\xca", + "Ecy;": "\u042d", + "Edot;": "\u0116", + "Efr;": "\U0001d508", + "Egrave": "\xc8", + "Egrave;": "\xc8", + "Element;": "\u2208", + "Emacr;": "\u0112", + "EmptySmallSquare;": "\u25fb", + "EmptyVerySmallSquare;": "\u25ab", + "Eogon;": "\u0118", + "Eopf;": "\U0001d53c", + "Epsilon;": "\u0395", + "Equal;": "\u2a75", + "EqualTilde;": "\u2242", + "Equilibrium;": "\u21cc", + "Escr;": "\u2130", + "Esim;": "\u2a73", + "Eta;": "\u0397", + "Euml": "\xcb", + "Euml;": "\xcb", + "Exists;": "\u2203", + "ExponentialE;": "\u2147", + "Fcy;": "\u0424", + "Ffr;": "\U0001d509", + "FilledSmallSquare;": "\u25fc", + "FilledVerySmallSquare;": "\u25aa", + "Fopf;": "\U0001d53d", + "ForAll;": "\u2200", + "Fouriertrf;": "\u2131", + "Fscr;": "\u2131", + "GJcy;": "\u0403", + "GT": ">", + "GT;": ">", + "Gamma;": "\u0393", + "Gammad;": "\u03dc", + "Gbreve;": "\u011e", + "Gcedil;": "\u0122", + "Gcirc;": "\u011c", + "Gcy;": "\u0413", + "Gdot;": "\u0120", + "Gfr;": "\U0001d50a", + "Gg;": "\u22d9", + "Gopf;": "\U0001d53e", + "GreaterEqual;": "\u2265", + "GreaterEqualLess;": "\u22db", + "GreaterFullEqual;": "\u2267", + "GreaterGreater;": "\u2aa2", + "GreaterLess;": "\u2277", + "GreaterSlantEqual;": "\u2a7e", + "GreaterTilde;": "\u2273", + "Gscr;": "\U0001d4a2", + "Gt;": "\u226b", + "HARDcy;": "\u042a", + "Hacek;": "\u02c7", + "Hat;": "^", + "Hcirc;": "\u0124", + "Hfr;": "\u210c", + "HilbertSpace;": "\u210b", + "Hopf;": "\u210d", + "HorizontalLine;": "\u2500", + "Hscr;": "\u210b", + "Hstrok;": "\u0126", + "HumpDownHump;": "\u224e", + "HumpEqual;": "\u224f", + "IEcy;": "\u0415", + "IJlig;": "\u0132", + "IOcy;": "\u0401", + "Iacute": "\xcd", + "Iacute;": "\xcd", + "Icirc": "\xce", + "Icirc;": "\xce", + "Icy;": "\u0418", + "Idot;": "\u0130", + "Ifr;": "\u2111", + "Igrave": "\xcc", + "Igrave;": "\xcc", + "Im;": "\u2111", + "Imacr;": "\u012a", + "ImaginaryI;": "\u2148", + "Implies;": "\u21d2", + "Int;": "\u222c", + "Integral;": "\u222b", + "Intersection;": "\u22c2", + "InvisibleComma;": "\u2063", + "InvisibleTimes;": "\u2062", + "Iogon;": "\u012e", + "Iopf;": "\U0001d540", + "Iota;": "\u0399", + "Iscr;": "\u2110", + 
"Itilde;": "\u0128", + "Iukcy;": "\u0406", + "Iuml": "\xcf", + "Iuml;": "\xcf", + "Jcirc;": "\u0134", + "Jcy;": "\u0419", + "Jfr;": "\U0001d50d", + "Jopf;": "\U0001d541", + "Jscr;": "\U0001d4a5", + "Jsercy;": "\u0408", + "Jukcy;": "\u0404", + "KHcy;": "\u0425", + "KJcy;": "\u040c", + "Kappa;": "\u039a", + "Kcedil;": "\u0136", + "Kcy;": "\u041a", + "Kfr;": "\U0001d50e", + "Kopf;": "\U0001d542", + "Kscr;": "\U0001d4a6", + "LJcy;": "\u0409", + "LT": "<", + "LT;": "<", + "Lacute;": "\u0139", + "Lambda;": "\u039b", + "Lang;": "\u27ea", + "Laplacetrf;": "\u2112", + "Larr;": "\u219e", + "Lcaron;": "\u013d", + "Lcedil;": "\u013b", + "Lcy;": "\u041b", + "LeftAngleBracket;": "\u27e8", + "LeftArrow;": "\u2190", + "LeftArrowBar;": "\u21e4", + "LeftArrowRightArrow;": "\u21c6", + "LeftCeiling;": "\u2308", + "LeftDoubleBracket;": "\u27e6", + "LeftDownTeeVector;": "\u2961", + "LeftDownVector;": "\u21c3", + "LeftDownVectorBar;": "\u2959", + "LeftFloor;": "\u230a", + "LeftRightArrow;": "\u2194", + "LeftRightVector;": "\u294e", + "LeftTee;": "\u22a3", + "LeftTeeArrow;": "\u21a4", + "LeftTeeVector;": "\u295a", + "LeftTriangle;": "\u22b2", + "LeftTriangleBar;": "\u29cf", + "LeftTriangleEqual;": "\u22b4", + "LeftUpDownVector;": "\u2951", + "LeftUpTeeVector;": "\u2960", + "LeftUpVector;": "\u21bf", + "LeftUpVectorBar;": "\u2958", + "LeftVector;": "\u21bc", + "LeftVectorBar;": "\u2952", + "Leftarrow;": "\u21d0", + "Leftrightarrow;": "\u21d4", + "LessEqualGreater;": "\u22da", + "LessFullEqual;": "\u2266", + "LessGreater;": "\u2276", + "LessLess;": "\u2aa1", + "LessSlantEqual;": "\u2a7d", + "LessTilde;": "\u2272", + "Lfr;": "\U0001d50f", + "Ll;": "\u22d8", + "Lleftarrow;": "\u21da", + "Lmidot;": "\u013f", + "LongLeftArrow;": "\u27f5", + "LongLeftRightArrow;": "\u27f7", + "LongRightArrow;": "\u27f6", + "Longleftarrow;": "\u27f8", + "Longleftrightarrow;": "\u27fa", + "Longrightarrow;": "\u27f9", + "Lopf;": "\U0001d543", + "LowerLeftArrow;": "\u2199", + "LowerRightArrow;": "\u2198", + "Lscr;": "\u2112", + "Lsh;": "\u21b0", + "Lstrok;": "\u0141", + "Lt;": "\u226a", + "Map;": "\u2905", + "Mcy;": "\u041c", + "MediumSpace;": "\u205f", + "Mellintrf;": "\u2133", + "Mfr;": "\U0001d510", + "MinusPlus;": "\u2213", + "Mopf;": "\U0001d544", + "Mscr;": "\u2133", + "Mu;": "\u039c", + "NJcy;": "\u040a", + "Nacute;": "\u0143", + "Ncaron;": "\u0147", + "Ncedil;": "\u0145", + "Ncy;": "\u041d", + "NegativeMediumSpace;": "\u200b", + "NegativeThickSpace;": "\u200b", + "NegativeThinSpace;": "\u200b", + "NegativeVeryThinSpace;": "\u200b", + "NestedGreaterGreater;": "\u226b", + "NestedLessLess;": "\u226a", + "NewLine;": "\n", + "Nfr;": "\U0001d511", + "NoBreak;": "\u2060", + "NonBreakingSpace;": "\xa0", + "Nopf;": "\u2115", + "Not;": "\u2aec", + "NotCongruent;": "\u2262", + "NotCupCap;": "\u226d", + "NotDoubleVerticalBar;": "\u2226", + "NotElement;": "\u2209", + "NotEqual;": "\u2260", + "NotEqualTilde;": "\u2242\u0338", + "NotExists;": "\u2204", + "NotGreater;": "\u226f", + "NotGreaterEqual;": "\u2271", + "NotGreaterFullEqual;": "\u2267\u0338", + "NotGreaterGreater;": "\u226b\u0338", + "NotGreaterLess;": "\u2279", + "NotGreaterSlantEqual;": "\u2a7e\u0338", + "NotGreaterTilde;": "\u2275", + "NotHumpDownHump;": "\u224e\u0338", + "NotHumpEqual;": "\u224f\u0338", + "NotLeftTriangle;": "\u22ea", + "NotLeftTriangleBar;": "\u29cf\u0338", + "NotLeftTriangleEqual;": "\u22ec", + "NotLess;": "\u226e", + "NotLessEqual;": "\u2270", + "NotLessGreater;": "\u2278", + "NotLessLess;": "\u226a\u0338", + "NotLessSlantEqual;": "\u2a7d\u0338", + "NotLessTilde;": 
"\u2274", + "NotNestedGreaterGreater;": "\u2aa2\u0338", + "NotNestedLessLess;": "\u2aa1\u0338", + "NotPrecedes;": "\u2280", + "NotPrecedesEqual;": "\u2aaf\u0338", + "NotPrecedesSlantEqual;": "\u22e0", + "NotReverseElement;": "\u220c", + "NotRightTriangle;": "\u22eb", + "NotRightTriangleBar;": "\u29d0\u0338", + "NotRightTriangleEqual;": "\u22ed", + "NotSquareSubset;": "\u228f\u0338", + "NotSquareSubsetEqual;": "\u22e2", + "NotSquareSuperset;": "\u2290\u0338", + "NotSquareSupersetEqual;": "\u22e3", + "NotSubset;": "\u2282\u20d2", + "NotSubsetEqual;": "\u2288", + "NotSucceeds;": "\u2281", + "NotSucceedsEqual;": "\u2ab0\u0338", + "NotSucceedsSlantEqual;": "\u22e1", + "NotSucceedsTilde;": "\u227f\u0338", + "NotSuperset;": "\u2283\u20d2", + "NotSupersetEqual;": "\u2289", + "NotTilde;": "\u2241", + "NotTildeEqual;": "\u2244", + "NotTildeFullEqual;": "\u2247", + "NotTildeTilde;": "\u2249", + "NotVerticalBar;": "\u2224", + "Nscr;": "\U0001d4a9", + "Ntilde": "\xd1", + "Ntilde;": "\xd1", + "Nu;": "\u039d", + "OElig;": "\u0152", + "Oacute": "\xd3", + "Oacute;": "\xd3", + "Ocirc": "\xd4", + "Ocirc;": "\xd4", + "Ocy;": "\u041e", + "Odblac;": "\u0150", + "Ofr;": "\U0001d512", + "Ograve": "\xd2", + "Ograve;": "\xd2", + "Omacr;": "\u014c", + "Omega;": "\u03a9", + "Omicron;": "\u039f", + "Oopf;": "\U0001d546", + "OpenCurlyDoubleQuote;": "\u201c", + "OpenCurlyQuote;": "\u2018", + "Or;": "\u2a54", + "Oscr;": "\U0001d4aa", + "Oslash": "\xd8", + "Oslash;": "\xd8", + "Otilde": "\xd5", + "Otilde;": "\xd5", + "Otimes;": "\u2a37", + "Ouml": "\xd6", + "Ouml;": "\xd6", + "OverBar;": "\u203e", + "OverBrace;": "\u23de", + "OverBracket;": "\u23b4", + "OverParenthesis;": "\u23dc", + "PartialD;": "\u2202", + "Pcy;": "\u041f", + "Pfr;": "\U0001d513", + "Phi;": "\u03a6", + "Pi;": "\u03a0", + "PlusMinus;": "\xb1", + "Poincareplane;": "\u210c", + "Popf;": "\u2119", + "Pr;": "\u2abb", + "Precedes;": "\u227a", + "PrecedesEqual;": "\u2aaf", + "PrecedesSlantEqual;": "\u227c", + "PrecedesTilde;": "\u227e", + "Prime;": "\u2033", + "Product;": "\u220f", + "Proportion;": "\u2237", + "Proportional;": "\u221d", + "Pscr;": "\U0001d4ab", + "Psi;": "\u03a8", + "QUOT": "\"", + "QUOT;": "\"", + "Qfr;": "\U0001d514", + "Qopf;": "\u211a", + "Qscr;": "\U0001d4ac", + "RBarr;": "\u2910", + "REG": "\xae", + "REG;": "\xae", + "Racute;": "\u0154", + "Rang;": "\u27eb", + "Rarr;": "\u21a0", + "Rarrtl;": "\u2916", + "Rcaron;": "\u0158", + "Rcedil;": "\u0156", + "Rcy;": "\u0420", + "Re;": "\u211c", + "ReverseElement;": "\u220b", + "ReverseEquilibrium;": "\u21cb", + "ReverseUpEquilibrium;": "\u296f", + "Rfr;": "\u211c", + "Rho;": "\u03a1", + "RightAngleBracket;": "\u27e9", + "RightArrow;": "\u2192", + "RightArrowBar;": "\u21e5", + "RightArrowLeftArrow;": "\u21c4", + "RightCeiling;": "\u2309", + "RightDoubleBracket;": "\u27e7", + "RightDownTeeVector;": "\u295d", + "RightDownVector;": "\u21c2", + "RightDownVectorBar;": "\u2955", + "RightFloor;": "\u230b", + "RightTee;": "\u22a2", + "RightTeeArrow;": "\u21a6", + "RightTeeVector;": "\u295b", + "RightTriangle;": "\u22b3", + "RightTriangleBar;": "\u29d0", + "RightTriangleEqual;": "\u22b5", + "RightUpDownVector;": "\u294f", + "RightUpTeeVector;": "\u295c", + "RightUpVector;": "\u21be", + "RightUpVectorBar;": "\u2954", + "RightVector;": "\u21c0", + "RightVectorBar;": "\u2953", + "Rightarrow;": "\u21d2", + "Ropf;": "\u211d", + "RoundImplies;": "\u2970", + "Rrightarrow;": "\u21db", + "Rscr;": "\u211b", + "Rsh;": "\u21b1", + "RuleDelayed;": "\u29f4", + "SHCHcy;": "\u0429", + "SHcy;": "\u0428", + "SOFTcy;": 
"\u042c", + "Sacute;": "\u015a", + "Sc;": "\u2abc", + "Scaron;": "\u0160", + "Scedil;": "\u015e", + "Scirc;": "\u015c", + "Scy;": "\u0421", + "Sfr;": "\U0001d516", + "ShortDownArrow;": "\u2193", + "ShortLeftArrow;": "\u2190", + "ShortRightArrow;": "\u2192", + "ShortUpArrow;": "\u2191", + "Sigma;": "\u03a3", + "SmallCircle;": "\u2218", + "Sopf;": "\U0001d54a", + "Sqrt;": "\u221a", + "Square;": "\u25a1", + "SquareIntersection;": "\u2293", + "SquareSubset;": "\u228f", + "SquareSubsetEqual;": "\u2291", + "SquareSuperset;": "\u2290", + "SquareSupersetEqual;": "\u2292", + "SquareUnion;": "\u2294", + "Sscr;": "\U0001d4ae", + "Star;": "\u22c6", + "Sub;": "\u22d0", + "Subset;": "\u22d0", + "SubsetEqual;": "\u2286", + "Succeeds;": "\u227b", + "SucceedsEqual;": "\u2ab0", + "SucceedsSlantEqual;": "\u227d", + "SucceedsTilde;": "\u227f", + "SuchThat;": "\u220b", + "Sum;": "\u2211", + "Sup;": "\u22d1", + "Superset;": "\u2283", + "SupersetEqual;": "\u2287", + "Supset;": "\u22d1", + "THORN": "\xde", + "THORN;": "\xde", + "TRADE;": "\u2122", + "TSHcy;": "\u040b", + "TScy;": "\u0426", + "Tab;": "\t", + "Tau;": "\u03a4", + "Tcaron;": "\u0164", + "Tcedil;": "\u0162", + "Tcy;": "\u0422", + "Tfr;": "\U0001d517", + "Therefore;": "\u2234", + "Theta;": "\u0398", + "ThickSpace;": "\u205f\u200a", + "ThinSpace;": "\u2009", + "Tilde;": "\u223c", + "TildeEqual;": "\u2243", + "TildeFullEqual;": "\u2245", + "TildeTilde;": "\u2248", + "Topf;": "\U0001d54b", + "TripleDot;": "\u20db", + "Tscr;": "\U0001d4af", + "Tstrok;": "\u0166", + "Uacute": "\xda", + "Uacute;": "\xda", + "Uarr;": "\u219f", + "Uarrocir;": "\u2949", + "Ubrcy;": "\u040e", + "Ubreve;": "\u016c", + "Ucirc": "\xdb", + "Ucirc;": "\xdb", + "Ucy;": "\u0423", + "Udblac;": "\u0170", + "Ufr;": "\U0001d518", + "Ugrave": "\xd9", + "Ugrave;": "\xd9", + "Umacr;": "\u016a", + "UnderBar;": "_", + "UnderBrace;": "\u23df", + "UnderBracket;": "\u23b5", + "UnderParenthesis;": "\u23dd", + "Union;": "\u22c3", + "UnionPlus;": "\u228e", + "Uogon;": "\u0172", + "Uopf;": "\U0001d54c", + "UpArrow;": "\u2191", + "UpArrowBar;": "\u2912", + "UpArrowDownArrow;": "\u21c5", + "UpDownArrow;": "\u2195", + "UpEquilibrium;": "\u296e", + "UpTee;": "\u22a5", + "UpTeeArrow;": "\u21a5", + "Uparrow;": "\u21d1", + "Updownarrow;": "\u21d5", + "UpperLeftArrow;": "\u2196", + "UpperRightArrow;": "\u2197", + "Upsi;": "\u03d2", + "Upsilon;": "\u03a5", + "Uring;": "\u016e", + "Uscr;": "\U0001d4b0", + "Utilde;": "\u0168", + "Uuml": "\xdc", + "Uuml;": "\xdc", + "VDash;": "\u22ab", + "Vbar;": "\u2aeb", + "Vcy;": "\u0412", + "Vdash;": "\u22a9", + "Vdashl;": "\u2ae6", + "Vee;": "\u22c1", + "Verbar;": "\u2016", + "Vert;": "\u2016", + "VerticalBar;": "\u2223", + "VerticalLine;": "|", + "VerticalSeparator;": "\u2758", + "VerticalTilde;": "\u2240", + "VeryThinSpace;": "\u200a", + "Vfr;": "\U0001d519", + "Vopf;": "\U0001d54d", + "Vscr;": "\U0001d4b1", + "Vvdash;": "\u22aa", + "Wcirc;": "\u0174", + "Wedge;": "\u22c0", + "Wfr;": "\U0001d51a", + "Wopf;": "\U0001d54e", + "Wscr;": "\U0001d4b2", + "Xfr;": "\U0001d51b", + "Xi;": "\u039e", + "Xopf;": "\U0001d54f", + "Xscr;": "\U0001d4b3", + "YAcy;": "\u042f", + "YIcy;": "\u0407", + "YUcy;": "\u042e", + "Yacute": "\xdd", + "Yacute;": "\xdd", + "Ycirc;": "\u0176", + "Ycy;": "\u042b", + "Yfr;": "\U0001d51c", + "Yopf;": "\U0001d550", + "Yscr;": "\U0001d4b4", + "Yuml;": "\u0178", + "ZHcy;": "\u0416", + "Zacute;": "\u0179", + "Zcaron;": "\u017d", + "Zcy;": "\u0417", + "Zdot;": "\u017b", + "ZeroWidthSpace;": "\u200b", + "Zeta;": "\u0396", + "Zfr;": "\u2128", + "Zopf;": "\u2124", + 
"Zscr;": "\U0001d4b5", + "aacute": "\xe1", + "aacute;": "\xe1", + "abreve;": "\u0103", + "ac;": "\u223e", + "acE;": "\u223e\u0333", + "acd;": "\u223f", + "acirc": "\xe2", + "acirc;": "\xe2", + "acute": "\xb4", + "acute;": "\xb4", + "acy;": "\u0430", + "aelig": "\xe6", + "aelig;": "\xe6", + "af;": "\u2061", + "afr;": "\U0001d51e", + "agrave": "\xe0", + "agrave;": "\xe0", + "alefsym;": "\u2135", + "aleph;": "\u2135", + "alpha;": "\u03b1", + "amacr;": "\u0101", + "amalg;": "\u2a3f", + "amp": "&", + "amp;": "&", + "and;": "\u2227", + "andand;": "\u2a55", + "andd;": "\u2a5c", + "andslope;": "\u2a58", + "andv;": "\u2a5a", + "ang;": "\u2220", + "ange;": "\u29a4", + "angle;": "\u2220", + "angmsd;": "\u2221", + "angmsdaa;": "\u29a8", + "angmsdab;": "\u29a9", + "angmsdac;": "\u29aa", + "angmsdad;": "\u29ab", + "angmsdae;": "\u29ac", + "angmsdaf;": "\u29ad", + "angmsdag;": "\u29ae", + "angmsdah;": "\u29af", + "angrt;": "\u221f", + "angrtvb;": "\u22be", + "angrtvbd;": "\u299d", + "angsph;": "\u2222", + "angst;": "\xc5", + "angzarr;": "\u237c", + "aogon;": "\u0105", + "aopf;": "\U0001d552", + "ap;": "\u2248", + "apE;": "\u2a70", + "apacir;": "\u2a6f", + "ape;": "\u224a", + "apid;": "\u224b", + "apos;": "'", + "approx;": "\u2248", + "approxeq;": "\u224a", + "aring": "\xe5", + "aring;": "\xe5", + "ascr;": "\U0001d4b6", + "ast;": "*", + "asymp;": "\u2248", + "asympeq;": "\u224d", + "atilde": "\xe3", + "atilde;": "\xe3", + "auml": "\xe4", + "auml;": "\xe4", + "awconint;": "\u2233", + "awint;": "\u2a11", + "bNot;": "\u2aed", + "backcong;": "\u224c", + "backepsilon;": "\u03f6", + "backprime;": "\u2035", + "backsim;": "\u223d", + "backsimeq;": "\u22cd", + "barvee;": "\u22bd", + "barwed;": "\u2305", + "barwedge;": "\u2305", + "bbrk;": "\u23b5", + "bbrktbrk;": "\u23b6", + "bcong;": "\u224c", + "bcy;": "\u0431", + "bdquo;": "\u201e", + "becaus;": "\u2235", + "because;": "\u2235", + "bemptyv;": "\u29b0", + "bepsi;": "\u03f6", + "bernou;": "\u212c", + "beta;": "\u03b2", + "beth;": "\u2136", + "between;": "\u226c", + "bfr;": "\U0001d51f", + "bigcap;": "\u22c2", + "bigcirc;": "\u25ef", + "bigcup;": "\u22c3", + "bigodot;": "\u2a00", + "bigoplus;": "\u2a01", + "bigotimes;": "\u2a02", + "bigsqcup;": "\u2a06", + "bigstar;": "\u2605", + "bigtriangledown;": "\u25bd", + "bigtriangleup;": "\u25b3", + "biguplus;": "\u2a04", + "bigvee;": "\u22c1", + "bigwedge;": "\u22c0", + "bkarow;": "\u290d", + "blacklozenge;": "\u29eb", + "blacksquare;": "\u25aa", + "blacktriangle;": "\u25b4", + "blacktriangledown;": "\u25be", + "blacktriangleleft;": "\u25c2", + "blacktriangleright;": "\u25b8", + "blank;": "\u2423", + "blk12;": "\u2592", + "blk14;": "\u2591", + "blk34;": "\u2593", + "block;": "\u2588", + "bne;": "=\u20e5", + "bnequiv;": "\u2261\u20e5", + "bnot;": "\u2310", + "bopf;": "\U0001d553", + "bot;": "\u22a5", + "bottom;": "\u22a5", + "bowtie;": "\u22c8", + "boxDL;": "\u2557", + "boxDR;": "\u2554", + "boxDl;": "\u2556", + "boxDr;": "\u2553", + "boxH;": "\u2550", + "boxHD;": "\u2566", + "boxHU;": "\u2569", + "boxHd;": "\u2564", + "boxHu;": "\u2567", + "boxUL;": "\u255d", + "boxUR;": "\u255a", + "boxUl;": "\u255c", + "boxUr;": "\u2559", + "boxV;": "\u2551", + "boxVH;": "\u256c", + "boxVL;": "\u2563", + "boxVR;": "\u2560", + "boxVh;": "\u256b", + "boxVl;": "\u2562", + "boxVr;": "\u255f", + "boxbox;": "\u29c9", + "boxdL;": "\u2555", + "boxdR;": "\u2552", + "boxdl;": "\u2510", + "boxdr;": "\u250c", + "boxh;": "\u2500", + "boxhD;": "\u2565", + "boxhU;": "\u2568", + "boxhd;": "\u252c", + "boxhu;": "\u2534", + "boxminus;": "\u229f", + 
"boxplus;": "\u229e", + "boxtimes;": "\u22a0", + "boxuL;": "\u255b", + "boxuR;": "\u2558", + "boxul;": "\u2518", + "boxur;": "\u2514", + "boxv;": "\u2502", + "boxvH;": "\u256a", + "boxvL;": "\u2561", + "boxvR;": "\u255e", + "boxvh;": "\u253c", + "boxvl;": "\u2524", + "boxvr;": "\u251c", + "bprime;": "\u2035", + "breve;": "\u02d8", + "brvbar": "\xa6", + "brvbar;": "\xa6", + "bscr;": "\U0001d4b7", + "bsemi;": "\u204f", + "bsim;": "\u223d", + "bsime;": "\u22cd", + "bsol;": "\\", + "bsolb;": "\u29c5", + "bsolhsub;": "\u27c8", + "bull;": "\u2022", + "bullet;": "\u2022", + "bump;": "\u224e", + "bumpE;": "\u2aae", + "bumpe;": "\u224f", + "bumpeq;": "\u224f", + "cacute;": "\u0107", + "cap;": "\u2229", + "capand;": "\u2a44", + "capbrcup;": "\u2a49", + "capcap;": "\u2a4b", + "capcup;": "\u2a47", + "capdot;": "\u2a40", + "caps;": "\u2229\ufe00", + "caret;": "\u2041", + "caron;": "\u02c7", + "ccaps;": "\u2a4d", + "ccaron;": "\u010d", + "ccedil": "\xe7", + "ccedil;": "\xe7", + "ccirc;": "\u0109", + "ccups;": "\u2a4c", + "ccupssm;": "\u2a50", + "cdot;": "\u010b", + "cedil": "\xb8", + "cedil;": "\xb8", + "cemptyv;": "\u29b2", + "cent": "\xa2", + "cent;": "\xa2", + "centerdot;": "\xb7", + "cfr;": "\U0001d520", + "chcy;": "\u0447", + "check;": "\u2713", + "checkmark;": "\u2713", + "chi;": "\u03c7", + "cir;": "\u25cb", + "cirE;": "\u29c3", + "circ;": "\u02c6", + "circeq;": "\u2257", + "circlearrowleft;": "\u21ba", + "circlearrowright;": "\u21bb", + "circledR;": "\xae", + "circledS;": "\u24c8", + "circledast;": "\u229b", + "circledcirc;": "\u229a", + "circleddash;": "\u229d", + "cire;": "\u2257", + "cirfnint;": "\u2a10", + "cirmid;": "\u2aef", + "cirscir;": "\u29c2", + "clubs;": "\u2663", + "clubsuit;": "\u2663", + "colon;": ":", + "colone;": "\u2254", + "coloneq;": "\u2254", + "comma;": ",", + "commat;": "@", + "comp;": "\u2201", + "compfn;": "\u2218", + "complement;": "\u2201", + "complexes;": "\u2102", + "cong;": "\u2245", + "congdot;": "\u2a6d", + "conint;": "\u222e", + "copf;": "\U0001d554", + "coprod;": "\u2210", + "copy": "\xa9", + "copy;": "\xa9", + "copysr;": "\u2117", + "crarr;": "\u21b5", + "cross;": "\u2717", + "cscr;": "\U0001d4b8", + "csub;": "\u2acf", + "csube;": "\u2ad1", + "csup;": "\u2ad0", + "csupe;": "\u2ad2", + "ctdot;": "\u22ef", + "cudarrl;": "\u2938", + "cudarrr;": "\u2935", + "cuepr;": "\u22de", + "cuesc;": "\u22df", + "cularr;": "\u21b6", + "cularrp;": "\u293d", + "cup;": "\u222a", + "cupbrcap;": "\u2a48", + "cupcap;": "\u2a46", + "cupcup;": "\u2a4a", + "cupdot;": "\u228d", + "cupor;": "\u2a45", + "cups;": "\u222a\ufe00", + "curarr;": "\u21b7", + "curarrm;": "\u293c", + "curlyeqprec;": "\u22de", + "curlyeqsucc;": "\u22df", + "curlyvee;": "\u22ce", + "curlywedge;": "\u22cf", + "curren": "\xa4", + "curren;": "\xa4", + "curvearrowleft;": "\u21b6", + "curvearrowright;": "\u21b7", + "cuvee;": "\u22ce", + "cuwed;": "\u22cf", + "cwconint;": "\u2232", + "cwint;": "\u2231", + "cylcty;": "\u232d", + "dArr;": "\u21d3", + "dHar;": "\u2965", + "dagger;": "\u2020", + "daleth;": "\u2138", + "darr;": "\u2193", + "dash;": "\u2010", + "dashv;": "\u22a3", + "dbkarow;": "\u290f", + "dblac;": "\u02dd", + "dcaron;": "\u010f", + "dcy;": "\u0434", + "dd;": "\u2146", + "ddagger;": "\u2021", + "ddarr;": "\u21ca", + "ddotseq;": "\u2a77", + "deg": "\xb0", + "deg;": "\xb0", + "delta;": "\u03b4", + "demptyv;": "\u29b1", + "dfisht;": "\u297f", + "dfr;": "\U0001d521", + "dharl;": "\u21c3", + "dharr;": "\u21c2", + "diam;": "\u22c4", + "diamond;": "\u22c4", + "diamondsuit;": "\u2666", + "diams;": "\u2666", + "die;": 
"\xa8", + "digamma;": "\u03dd", + "disin;": "\u22f2", + "div;": "\xf7", + "divide": "\xf7", + "divide;": "\xf7", + "divideontimes;": "\u22c7", + "divonx;": "\u22c7", + "djcy;": "\u0452", + "dlcorn;": "\u231e", + "dlcrop;": "\u230d", + "dollar;": "$", + "dopf;": "\U0001d555", + "dot;": "\u02d9", + "doteq;": "\u2250", + "doteqdot;": "\u2251", + "dotminus;": "\u2238", + "dotplus;": "\u2214", + "dotsquare;": "\u22a1", + "doublebarwedge;": "\u2306", + "downarrow;": "\u2193", + "downdownarrows;": "\u21ca", + "downharpoonleft;": "\u21c3", + "downharpoonright;": "\u21c2", + "drbkarow;": "\u2910", + "drcorn;": "\u231f", + "drcrop;": "\u230c", + "dscr;": "\U0001d4b9", + "dscy;": "\u0455", + "dsol;": "\u29f6", + "dstrok;": "\u0111", + "dtdot;": "\u22f1", + "dtri;": "\u25bf", + "dtrif;": "\u25be", + "duarr;": "\u21f5", + "duhar;": "\u296f", + "dwangle;": "\u29a6", + "dzcy;": "\u045f", + "dzigrarr;": "\u27ff", + "eDDot;": "\u2a77", + "eDot;": "\u2251", + "eacute": "\xe9", + "eacute;": "\xe9", + "easter;": "\u2a6e", + "ecaron;": "\u011b", + "ecir;": "\u2256", + "ecirc": "\xea", + "ecirc;": "\xea", + "ecolon;": "\u2255", + "ecy;": "\u044d", + "edot;": "\u0117", + "ee;": "\u2147", + "efDot;": "\u2252", + "efr;": "\U0001d522", + "eg;": "\u2a9a", + "egrave": "\xe8", + "egrave;": "\xe8", + "egs;": "\u2a96", + "egsdot;": "\u2a98", + "el;": "\u2a99", + "elinters;": "\u23e7", + "ell;": "\u2113", + "els;": "\u2a95", + "elsdot;": "\u2a97", + "emacr;": "\u0113", + "empty;": "\u2205", + "emptyset;": "\u2205", + "emptyv;": "\u2205", + "emsp13;": "\u2004", + "emsp14;": "\u2005", + "emsp;": "\u2003", + "eng;": "\u014b", + "ensp;": "\u2002", + "eogon;": "\u0119", + "eopf;": "\U0001d556", + "epar;": "\u22d5", + "eparsl;": "\u29e3", + "eplus;": "\u2a71", + "epsi;": "\u03b5", + "epsilon;": "\u03b5", + "epsiv;": "\u03f5", + "eqcirc;": "\u2256", + "eqcolon;": "\u2255", + "eqsim;": "\u2242", + "eqslantgtr;": "\u2a96", + "eqslantless;": "\u2a95", + "equals;": "=", + "equest;": "\u225f", + "equiv;": "\u2261", + "equivDD;": "\u2a78", + "eqvparsl;": "\u29e5", + "erDot;": "\u2253", + "erarr;": "\u2971", + "escr;": "\u212f", + "esdot;": "\u2250", + "esim;": "\u2242", + "eta;": "\u03b7", + "eth": "\xf0", + "eth;": "\xf0", + "euml": "\xeb", + "euml;": "\xeb", + "euro;": "\u20ac", + "excl;": "!", + "exist;": "\u2203", + "expectation;": "\u2130", + "exponentiale;": "\u2147", + "fallingdotseq;": "\u2252", + "fcy;": "\u0444", + "female;": "\u2640", + "ffilig;": "\ufb03", + "fflig;": "\ufb00", + "ffllig;": "\ufb04", + "ffr;": "\U0001d523", + "filig;": "\ufb01", + "fjlig;": "fj", + "flat;": "\u266d", + "fllig;": "\ufb02", + "fltns;": "\u25b1", + "fnof;": "\u0192", + "fopf;": "\U0001d557", + "forall;": "\u2200", + "fork;": "\u22d4", + "forkv;": "\u2ad9", + "fpartint;": "\u2a0d", + "frac12": "\xbd", + "frac12;": "\xbd", + "frac13;": "\u2153", + "frac14": "\xbc", + "frac14;": "\xbc", + "frac15;": "\u2155", + "frac16;": "\u2159", + "frac18;": "\u215b", + "frac23;": "\u2154", + "frac25;": "\u2156", + "frac34": "\xbe", + "frac34;": "\xbe", + "frac35;": "\u2157", + "frac38;": "\u215c", + "frac45;": "\u2158", + "frac56;": "\u215a", + "frac58;": "\u215d", + "frac78;": "\u215e", + "frasl;": "\u2044", + "frown;": "\u2322", + "fscr;": "\U0001d4bb", + "gE;": "\u2267", + "gEl;": "\u2a8c", + "gacute;": "\u01f5", + "gamma;": "\u03b3", + "gammad;": "\u03dd", + "gap;": "\u2a86", + "gbreve;": "\u011f", + "gcirc;": "\u011d", + "gcy;": "\u0433", + "gdot;": "\u0121", + "ge;": "\u2265", + "gel;": "\u22db", + "geq;": "\u2265", + "geqq;": "\u2267", + "geqslant;": 
"\u2a7e", + "ges;": "\u2a7e", + "gescc;": "\u2aa9", + "gesdot;": "\u2a80", + "gesdoto;": "\u2a82", + "gesdotol;": "\u2a84", + "gesl;": "\u22db\ufe00", + "gesles;": "\u2a94", + "gfr;": "\U0001d524", + "gg;": "\u226b", + "ggg;": "\u22d9", + "gimel;": "\u2137", + "gjcy;": "\u0453", + "gl;": "\u2277", + "glE;": "\u2a92", + "gla;": "\u2aa5", + "glj;": "\u2aa4", + "gnE;": "\u2269", + "gnap;": "\u2a8a", + "gnapprox;": "\u2a8a", + "gne;": "\u2a88", + "gneq;": "\u2a88", + "gneqq;": "\u2269", + "gnsim;": "\u22e7", + "gopf;": "\U0001d558", + "grave;": "`", + "gscr;": "\u210a", + "gsim;": "\u2273", + "gsime;": "\u2a8e", + "gsiml;": "\u2a90", + "gt": ">", + "gt;": ">", + "gtcc;": "\u2aa7", + "gtcir;": "\u2a7a", + "gtdot;": "\u22d7", + "gtlPar;": "\u2995", + "gtquest;": "\u2a7c", + "gtrapprox;": "\u2a86", + "gtrarr;": "\u2978", + "gtrdot;": "\u22d7", + "gtreqless;": "\u22db", + "gtreqqless;": "\u2a8c", + "gtrless;": "\u2277", + "gtrsim;": "\u2273", + "gvertneqq;": "\u2269\ufe00", + "gvnE;": "\u2269\ufe00", + "hArr;": "\u21d4", + "hairsp;": "\u200a", + "half;": "\xbd", + "hamilt;": "\u210b", + "hardcy;": "\u044a", + "harr;": "\u2194", + "harrcir;": "\u2948", + "harrw;": "\u21ad", + "hbar;": "\u210f", + "hcirc;": "\u0125", + "hearts;": "\u2665", + "heartsuit;": "\u2665", + "hellip;": "\u2026", + "hercon;": "\u22b9", + "hfr;": "\U0001d525", + "hksearow;": "\u2925", + "hkswarow;": "\u2926", + "hoarr;": "\u21ff", + "homtht;": "\u223b", + "hookleftarrow;": "\u21a9", + "hookrightarrow;": "\u21aa", + "hopf;": "\U0001d559", + "horbar;": "\u2015", + "hscr;": "\U0001d4bd", + "hslash;": "\u210f", + "hstrok;": "\u0127", + "hybull;": "\u2043", + "hyphen;": "\u2010", + "iacute": "\xed", + "iacute;": "\xed", + "ic;": "\u2063", + "icirc": "\xee", + "icirc;": "\xee", + "icy;": "\u0438", + "iecy;": "\u0435", + "iexcl": "\xa1", + "iexcl;": "\xa1", + "iff;": "\u21d4", + "ifr;": "\U0001d526", + "igrave": "\xec", + "igrave;": "\xec", + "ii;": "\u2148", + "iiiint;": "\u2a0c", + "iiint;": "\u222d", + "iinfin;": "\u29dc", + "iiota;": "\u2129", + "ijlig;": "\u0133", + "imacr;": "\u012b", + "image;": "\u2111", + "imagline;": "\u2110", + "imagpart;": "\u2111", + "imath;": "\u0131", + "imof;": "\u22b7", + "imped;": "\u01b5", + "in;": "\u2208", + "incare;": "\u2105", + "infin;": "\u221e", + "infintie;": "\u29dd", + "inodot;": "\u0131", + "int;": "\u222b", + "intcal;": "\u22ba", + "integers;": "\u2124", + "intercal;": "\u22ba", + "intlarhk;": "\u2a17", + "intprod;": "\u2a3c", + "iocy;": "\u0451", + "iogon;": "\u012f", + "iopf;": "\U0001d55a", + "iota;": "\u03b9", + "iprod;": "\u2a3c", + "iquest": "\xbf", + "iquest;": "\xbf", + "iscr;": "\U0001d4be", + "isin;": "\u2208", + "isinE;": "\u22f9", + "isindot;": "\u22f5", + "isins;": "\u22f4", + "isinsv;": "\u22f3", + "isinv;": "\u2208", + "it;": "\u2062", + "itilde;": "\u0129", + "iukcy;": "\u0456", + "iuml": "\xef", + "iuml;": "\xef", + "jcirc;": "\u0135", + "jcy;": "\u0439", + "jfr;": "\U0001d527", + "jmath;": "\u0237", + "jopf;": "\U0001d55b", + "jscr;": "\U0001d4bf", + "jsercy;": "\u0458", + "jukcy;": "\u0454", + "kappa;": "\u03ba", + "kappav;": "\u03f0", + "kcedil;": "\u0137", + "kcy;": "\u043a", + "kfr;": "\U0001d528", + "kgreen;": "\u0138", + "khcy;": "\u0445", + "kjcy;": "\u045c", + "kopf;": "\U0001d55c", + "kscr;": "\U0001d4c0", + "lAarr;": "\u21da", + "lArr;": "\u21d0", + "lAtail;": "\u291b", + "lBarr;": "\u290e", + "lE;": "\u2266", + "lEg;": "\u2a8b", + "lHar;": "\u2962", + "lacute;": "\u013a", + "laemptyv;": "\u29b4", + "lagran;": "\u2112", + "lambda;": "\u03bb", + "lang;": 
"\u27e8", + "langd;": "\u2991", + "langle;": "\u27e8", + "lap;": "\u2a85", + "laquo": "\xab", + "laquo;": "\xab", + "larr;": "\u2190", + "larrb;": "\u21e4", + "larrbfs;": "\u291f", + "larrfs;": "\u291d", + "larrhk;": "\u21a9", + "larrlp;": "\u21ab", + "larrpl;": "\u2939", + "larrsim;": "\u2973", + "larrtl;": "\u21a2", + "lat;": "\u2aab", + "latail;": "\u2919", + "late;": "\u2aad", + "lates;": "\u2aad\ufe00", + "lbarr;": "\u290c", + "lbbrk;": "\u2772", + "lbrace;": "{", + "lbrack;": "[", + "lbrke;": "\u298b", + "lbrksld;": "\u298f", + "lbrkslu;": "\u298d", + "lcaron;": "\u013e", + "lcedil;": "\u013c", + "lceil;": "\u2308", + "lcub;": "{", + "lcy;": "\u043b", + "ldca;": "\u2936", + "ldquo;": "\u201c", + "ldquor;": "\u201e", + "ldrdhar;": "\u2967", + "ldrushar;": "\u294b", + "ldsh;": "\u21b2", + "le;": "\u2264", + "leftarrow;": "\u2190", + "leftarrowtail;": "\u21a2", + "leftharpoondown;": "\u21bd", + "leftharpoonup;": "\u21bc", + "leftleftarrows;": "\u21c7", + "leftrightarrow;": "\u2194", + "leftrightarrows;": "\u21c6", + "leftrightharpoons;": "\u21cb", + "leftrightsquigarrow;": "\u21ad", + "leftthreetimes;": "\u22cb", + "leg;": "\u22da", + "leq;": "\u2264", + "leqq;": "\u2266", + "leqslant;": "\u2a7d", + "les;": "\u2a7d", + "lescc;": "\u2aa8", + "lesdot;": "\u2a7f", + "lesdoto;": "\u2a81", + "lesdotor;": "\u2a83", + "lesg;": "\u22da\ufe00", + "lesges;": "\u2a93", + "lessapprox;": "\u2a85", + "lessdot;": "\u22d6", + "lesseqgtr;": "\u22da", + "lesseqqgtr;": "\u2a8b", + "lessgtr;": "\u2276", + "lesssim;": "\u2272", + "lfisht;": "\u297c", + "lfloor;": "\u230a", + "lfr;": "\U0001d529", + "lg;": "\u2276", + "lgE;": "\u2a91", + "lhard;": "\u21bd", + "lharu;": "\u21bc", + "lharul;": "\u296a", + "lhblk;": "\u2584", + "ljcy;": "\u0459", + "ll;": "\u226a", + "llarr;": "\u21c7", + "llcorner;": "\u231e", + "llhard;": "\u296b", + "lltri;": "\u25fa", + "lmidot;": "\u0140", + "lmoust;": "\u23b0", + "lmoustache;": "\u23b0", + "lnE;": "\u2268", + "lnap;": "\u2a89", + "lnapprox;": "\u2a89", + "lne;": "\u2a87", + "lneq;": "\u2a87", + "lneqq;": "\u2268", + "lnsim;": "\u22e6", + "loang;": "\u27ec", + "loarr;": "\u21fd", + "lobrk;": "\u27e6", + "longleftarrow;": "\u27f5", + "longleftrightarrow;": "\u27f7", + "longmapsto;": "\u27fc", + "longrightarrow;": "\u27f6", + "looparrowleft;": "\u21ab", + "looparrowright;": "\u21ac", + "lopar;": "\u2985", + "lopf;": "\U0001d55d", + "loplus;": "\u2a2d", + "lotimes;": "\u2a34", + "lowast;": "\u2217", + "lowbar;": "_", + "loz;": "\u25ca", + "lozenge;": "\u25ca", + "lozf;": "\u29eb", + "lpar;": "(", + "lparlt;": "\u2993", + "lrarr;": "\u21c6", + "lrcorner;": "\u231f", + "lrhar;": "\u21cb", + "lrhard;": "\u296d", + "lrm;": "\u200e", + "lrtri;": "\u22bf", + "lsaquo;": "\u2039", + "lscr;": "\U0001d4c1", + "lsh;": "\u21b0", + "lsim;": "\u2272", + "lsime;": "\u2a8d", + "lsimg;": "\u2a8f", + "lsqb;": "[", + "lsquo;": "\u2018", + "lsquor;": "\u201a", + "lstrok;": "\u0142", + "lt": "<", + "lt;": "<", + "ltcc;": "\u2aa6", + "ltcir;": "\u2a79", + "ltdot;": "\u22d6", + "lthree;": "\u22cb", + "ltimes;": "\u22c9", + "ltlarr;": "\u2976", + "ltquest;": "\u2a7b", + "ltrPar;": "\u2996", + "ltri;": "\u25c3", + "ltrie;": "\u22b4", + "ltrif;": "\u25c2", + "lurdshar;": "\u294a", + "luruhar;": "\u2966", + "lvertneqq;": "\u2268\ufe00", + "lvnE;": "\u2268\ufe00", + "mDDot;": "\u223a", + "macr": "\xaf", + "macr;": "\xaf", + "male;": "\u2642", + "malt;": "\u2720", + "maltese;": "\u2720", + "map;": "\u21a6", + "mapsto;": "\u21a6", + "mapstodown;": "\u21a7", + "mapstoleft;": "\u21a4", + "mapstoup;": 
"\u21a5", + "marker;": "\u25ae", + "mcomma;": "\u2a29", + "mcy;": "\u043c", + "mdash;": "\u2014", + "measuredangle;": "\u2221", + "mfr;": "\U0001d52a", + "mho;": "\u2127", + "micro": "\xb5", + "micro;": "\xb5", + "mid;": "\u2223", + "midast;": "*", + "midcir;": "\u2af0", + "middot": "\xb7", + "middot;": "\xb7", + "minus;": "\u2212", + "minusb;": "\u229f", + "minusd;": "\u2238", + "minusdu;": "\u2a2a", + "mlcp;": "\u2adb", + "mldr;": "\u2026", + "mnplus;": "\u2213", + "models;": "\u22a7", + "mopf;": "\U0001d55e", + "mp;": "\u2213", + "mscr;": "\U0001d4c2", + "mstpos;": "\u223e", + "mu;": "\u03bc", + "multimap;": "\u22b8", + "mumap;": "\u22b8", + "nGg;": "\u22d9\u0338", + "nGt;": "\u226b\u20d2", + "nGtv;": "\u226b\u0338", + "nLeftarrow;": "\u21cd", + "nLeftrightarrow;": "\u21ce", + "nLl;": "\u22d8\u0338", + "nLt;": "\u226a\u20d2", + "nLtv;": "\u226a\u0338", + "nRightarrow;": "\u21cf", + "nVDash;": "\u22af", + "nVdash;": "\u22ae", + "nabla;": "\u2207", + "nacute;": "\u0144", + "nang;": "\u2220\u20d2", + "nap;": "\u2249", + "napE;": "\u2a70\u0338", + "napid;": "\u224b\u0338", + "napos;": "\u0149", + "napprox;": "\u2249", + "natur;": "\u266e", + "natural;": "\u266e", + "naturals;": "\u2115", + "nbsp": "\xa0", + "nbsp;": "\xa0", + "nbump;": "\u224e\u0338", + "nbumpe;": "\u224f\u0338", + "ncap;": "\u2a43", + "ncaron;": "\u0148", + "ncedil;": "\u0146", + "ncong;": "\u2247", + "ncongdot;": "\u2a6d\u0338", + "ncup;": "\u2a42", + "ncy;": "\u043d", + "ndash;": "\u2013", + "ne;": "\u2260", + "neArr;": "\u21d7", + "nearhk;": "\u2924", + "nearr;": "\u2197", + "nearrow;": "\u2197", + "nedot;": "\u2250\u0338", + "nequiv;": "\u2262", + "nesear;": "\u2928", + "nesim;": "\u2242\u0338", + "nexist;": "\u2204", + "nexists;": "\u2204", + "nfr;": "\U0001d52b", + "ngE;": "\u2267\u0338", + "nge;": "\u2271", + "ngeq;": "\u2271", + "ngeqq;": "\u2267\u0338", + "ngeqslant;": "\u2a7e\u0338", + "nges;": "\u2a7e\u0338", + "ngsim;": "\u2275", + "ngt;": "\u226f", + "ngtr;": "\u226f", + "nhArr;": "\u21ce", + "nharr;": "\u21ae", + "nhpar;": "\u2af2", + "ni;": "\u220b", + "nis;": "\u22fc", + "nisd;": "\u22fa", + "niv;": "\u220b", + "njcy;": "\u045a", + "nlArr;": "\u21cd", + "nlE;": "\u2266\u0338", + "nlarr;": "\u219a", + "nldr;": "\u2025", + "nle;": "\u2270", + "nleftarrow;": "\u219a", + "nleftrightarrow;": "\u21ae", + "nleq;": "\u2270", + "nleqq;": "\u2266\u0338", + "nleqslant;": "\u2a7d\u0338", + "nles;": "\u2a7d\u0338", + "nless;": "\u226e", + "nlsim;": "\u2274", + "nlt;": "\u226e", + "nltri;": "\u22ea", + "nltrie;": "\u22ec", + "nmid;": "\u2224", + "nopf;": "\U0001d55f", + "not": "\xac", + "not;": "\xac", + "notin;": "\u2209", + "notinE;": "\u22f9\u0338", + "notindot;": "\u22f5\u0338", + "notinva;": "\u2209", + "notinvb;": "\u22f7", + "notinvc;": "\u22f6", + "notni;": "\u220c", + "notniva;": "\u220c", + "notnivb;": "\u22fe", + "notnivc;": "\u22fd", + "npar;": "\u2226", + "nparallel;": "\u2226", + "nparsl;": "\u2afd\u20e5", + "npart;": "\u2202\u0338", + "npolint;": "\u2a14", + "npr;": "\u2280", + "nprcue;": "\u22e0", + "npre;": "\u2aaf\u0338", + "nprec;": "\u2280", + "npreceq;": "\u2aaf\u0338", + "nrArr;": "\u21cf", + "nrarr;": "\u219b", + "nrarrc;": "\u2933\u0338", + "nrarrw;": "\u219d\u0338", + "nrightarrow;": "\u219b", + "nrtri;": "\u22eb", + "nrtrie;": "\u22ed", + "nsc;": "\u2281", + "nsccue;": "\u22e1", + "nsce;": "\u2ab0\u0338", + "nscr;": "\U0001d4c3", + "nshortmid;": "\u2224", + "nshortparallel;": "\u2226", + "nsim;": "\u2241", + "nsime;": "\u2244", + "nsimeq;": "\u2244", + "nsmid;": "\u2224", + "nspar;": "\u2226", + 
"nsqsube;": "\u22e2", + "nsqsupe;": "\u22e3", + "nsub;": "\u2284", + "nsubE;": "\u2ac5\u0338", + "nsube;": "\u2288", + "nsubset;": "\u2282\u20d2", + "nsubseteq;": "\u2288", + "nsubseteqq;": "\u2ac5\u0338", + "nsucc;": "\u2281", + "nsucceq;": "\u2ab0\u0338", + "nsup;": "\u2285", + "nsupE;": "\u2ac6\u0338", + "nsupe;": "\u2289", + "nsupset;": "\u2283\u20d2", + "nsupseteq;": "\u2289", + "nsupseteqq;": "\u2ac6\u0338", + "ntgl;": "\u2279", + "ntilde": "\xf1", + "ntilde;": "\xf1", + "ntlg;": "\u2278", + "ntriangleleft;": "\u22ea", + "ntrianglelefteq;": "\u22ec", + "ntriangleright;": "\u22eb", + "ntrianglerighteq;": "\u22ed", + "nu;": "\u03bd", + "num;": "#", + "numero;": "\u2116", + "numsp;": "\u2007", + "nvDash;": "\u22ad", + "nvHarr;": "\u2904", + "nvap;": "\u224d\u20d2", + "nvdash;": "\u22ac", + "nvge;": "\u2265\u20d2", + "nvgt;": ">\u20d2", + "nvinfin;": "\u29de", + "nvlArr;": "\u2902", + "nvle;": "\u2264\u20d2", + "nvlt;": "<\u20d2", + "nvltrie;": "\u22b4\u20d2", + "nvrArr;": "\u2903", + "nvrtrie;": "\u22b5\u20d2", + "nvsim;": "\u223c\u20d2", + "nwArr;": "\u21d6", + "nwarhk;": "\u2923", + "nwarr;": "\u2196", + "nwarrow;": "\u2196", + "nwnear;": "\u2927", + "oS;": "\u24c8", + "oacute": "\xf3", + "oacute;": "\xf3", + "oast;": "\u229b", + "ocir;": "\u229a", + "ocirc": "\xf4", + "ocirc;": "\xf4", + "ocy;": "\u043e", + "odash;": "\u229d", + "odblac;": "\u0151", + "odiv;": "\u2a38", + "odot;": "\u2299", + "odsold;": "\u29bc", + "oelig;": "\u0153", + "ofcir;": "\u29bf", + "ofr;": "\U0001d52c", + "ogon;": "\u02db", + "ograve": "\xf2", + "ograve;": "\xf2", + "ogt;": "\u29c1", + "ohbar;": "\u29b5", + "ohm;": "\u03a9", + "oint;": "\u222e", + "olarr;": "\u21ba", + "olcir;": "\u29be", + "olcross;": "\u29bb", + "oline;": "\u203e", + "olt;": "\u29c0", + "omacr;": "\u014d", + "omega;": "\u03c9", + "omicron;": "\u03bf", + "omid;": "\u29b6", + "ominus;": "\u2296", + "oopf;": "\U0001d560", + "opar;": "\u29b7", + "operp;": "\u29b9", + "oplus;": "\u2295", + "or;": "\u2228", + "orarr;": "\u21bb", + "ord;": "\u2a5d", + "order;": "\u2134", + "orderof;": "\u2134", + "ordf": "\xaa", + "ordf;": "\xaa", + "ordm": "\xba", + "ordm;": "\xba", + "origof;": "\u22b6", + "oror;": "\u2a56", + "orslope;": "\u2a57", + "orv;": "\u2a5b", + "oscr;": "\u2134", + "oslash": "\xf8", + "oslash;": "\xf8", + "osol;": "\u2298", + "otilde": "\xf5", + "otilde;": "\xf5", + "otimes;": "\u2297", + "otimesas;": "\u2a36", + "ouml": "\xf6", + "ouml;": "\xf6", + "ovbar;": "\u233d", + "par;": "\u2225", + "para": "\xb6", + "para;": "\xb6", + "parallel;": "\u2225", + "parsim;": "\u2af3", + "parsl;": "\u2afd", + "part;": "\u2202", + "pcy;": "\u043f", + "percnt;": "%", + "period;": ".", + "permil;": "\u2030", + "perp;": "\u22a5", + "pertenk;": "\u2031", + "pfr;": "\U0001d52d", + "phi;": "\u03c6", + "phiv;": "\u03d5", + "phmmat;": "\u2133", + "phone;": "\u260e", + "pi;": "\u03c0", + "pitchfork;": "\u22d4", + "piv;": "\u03d6", + "planck;": "\u210f", + "planckh;": "\u210e", + "plankv;": "\u210f", + "plus;": "+", + "plusacir;": "\u2a23", + "plusb;": "\u229e", + "pluscir;": "\u2a22", + "plusdo;": "\u2214", + "plusdu;": "\u2a25", + "pluse;": "\u2a72", + "plusmn": "\xb1", + "plusmn;": "\xb1", + "plussim;": "\u2a26", + "plustwo;": "\u2a27", + "pm;": "\xb1", + "pointint;": "\u2a15", + "popf;": "\U0001d561", + "pound": "\xa3", + "pound;": "\xa3", + "pr;": "\u227a", + "prE;": "\u2ab3", + "prap;": "\u2ab7", + "prcue;": "\u227c", + "pre;": "\u2aaf", + "prec;": "\u227a", + "precapprox;": "\u2ab7", + "preccurlyeq;": "\u227c", + "preceq;": "\u2aaf", + "precnapprox;": 
"\u2ab9", + "precneqq;": "\u2ab5", + "precnsim;": "\u22e8", + "precsim;": "\u227e", + "prime;": "\u2032", + "primes;": "\u2119", + "prnE;": "\u2ab5", + "prnap;": "\u2ab9", + "prnsim;": "\u22e8", + "prod;": "\u220f", + "profalar;": "\u232e", + "profline;": "\u2312", + "profsurf;": "\u2313", + "prop;": "\u221d", + "propto;": "\u221d", + "prsim;": "\u227e", + "prurel;": "\u22b0", + "pscr;": "\U0001d4c5", + "psi;": "\u03c8", + "puncsp;": "\u2008", + "qfr;": "\U0001d52e", + "qint;": "\u2a0c", + "qopf;": "\U0001d562", + "qprime;": "\u2057", + "qscr;": "\U0001d4c6", + "quaternions;": "\u210d", + "quatint;": "\u2a16", + "quest;": "?", + "questeq;": "\u225f", + "quot": "\"", + "quot;": "\"", + "rAarr;": "\u21db", + "rArr;": "\u21d2", + "rAtail;": "\u291c", + "rBarr;": "\u290f", + "rHar;": "\u2964", + "race;": "\u223d\u0331", + "racute;": "\u0155", + "radic;": "\u221a", + "raemptyv;": "\u29b3", + "rang;": "\u27e9", + "rangd;": "\u2992", + "range;": "\u29a5", + "rangle;": "\u27e9", + "raquo": "\xbb", + "raquo;": "\xbb", + "rarr;": "\u2192", + "rarrap;": "\u2975", + "rarrb;": "\u21e5", + "rarrbfs;": "\u2920", + "rarrc;": "\u2933", + "rarrfs;": "\u291e", + "rarrhk;": "\u21aa", + "rarrlp;": "\u21ac", + "rarrpl;": "\u2945", + "rarrsim;": "\u2974", + "rarrtl;": "\u21a3", + "rarrw;": "\u219d", + "ratail;": "\u291a", + "ratio;": "\u2236", + "rationals;": "\u211a", + "rbarr;": "\u290d", + "rbbrk;": "\u2773", + "rbrace;": "}", + "rbrack;": "]", + "rbrke;": "\u298c", + "rbrksld;": "\u298e", + "rbrkslu;": "\u2990", + "rcaron;": "\u0159", + "rcedil;": "\u0157", + "rceil;": "\u2309", + "rcub;": "}", + "rcy;": "\u0440", + "rdca;": "\u2937", + "rdldhar;": "\u2969", + "rdquo;": "\u201d", + "rdquor;": "\u201d", + "rdsh;": "\u21b3", + "real;": "\u211c", + "realine;": "\u211b", + "realpart;": "\u211c", + "reals;": "\u211d", + "rect;": "\u25ad", + "reg": "\xae", + "reg;": "\xae", + "rfisht;": "\u297d", + "rfloor;": "\u230b", + "rfr;": "\U0001d52f", + "rhard;": "\u21c1", + "rharu;": "\u21c0", + "rharul;": "\u296c", + "rho;": "\u03c1", + "rhov;": "\u03f1", + "rightarrow;": "\u2192", + "rightarrowtail;": "\u21a3", + "rightharpoondown;": "\u21c1", + "rightharpoonup;": "\u21c0", + "rightleftarrows;": "\u21c4", + "rightleftharpoons;": "\u21cc", + "rightrightarrows;": "\u21c9", + "rightsquigarrow;": "\u219d", + "rightthreetimes;": "\u22cc", + "ring;": "\u02da", + "risingdotseq;": "\u2253", + "rlarr;": "\u21c4", + "rlhar;": "\u21cc", + "rlm;": "\u200f", + "rmoust;": "\u23b1", + "rmoustache;": "\u23b1", + "rnmid;": "\u2aee", + "roang;": "\u27ed", + "roarr;": "\u21fe", + "robrk;": "\u27e7", + "ropar;": "\u2986", + "ropf;": "\U0001d563", + "roplus;": "\u2a2e", + "rotimes;": "\u2a35", + "rpar;": ")", + "rpargt;": "\u2994", + "rppolint;": "\u2a12", + "rrarr;": "\u21c9", + "rsaquo;": "\u203a", + "rscr;": "\U0001d4c7", + "rsh;": "\u21b1", + "rsqb;": "]", + "rsquo;": "\u2019", + "rsquor;": "\u2019", + "rthree;": "\u22cc", + "rtimes;": "\u22ca", + "rtri;": "\u25b9", + "rtrie;": "\u22b5", + "rtrif;": "\u25b8", + "rtriltri;": "\u29ce", + "ruluhar;": "\u2968", + "rx;": "\u211e", + "sacute;": "\u015b", + "sbquo;": "\u201a", + "sc;": "\u227b", + "scE;": "\u2ab4", + "scap;": "\u2ab8", + "scaron;": "\u0161", + "sccue;": "\u227d", + "sce;": "\u2ab0", + "scedil;": "\u015f", + "scirc;": "\u015d", + "scnE;": "\u2ab6", + "scnap;": "\u2aba", + "scnsim;": "\u22e9", + "scpolint;": "\u2a13", + "scsim;": "\u227f", + "scy;": "\u0441", + "sdot;": "\u22c5", + "sdotb;": "\u22a1", + "sdote;": "\u2a66", + "seArr;": "\u21d8", + "searhk;": "\u2925", + "searr;": 
"\u2198", + "searrow;": "\u2198", + "sect": "\xa7", + "sect;": "\xa7", + "semi;": ";", + "seswar;": "\u2929", + "setminus;": "\u2216", + "setmn;": "\u2216", + "sext;": "\u2736", + "sfr;": "\U0001d530", + "sfrown;": "\u2322", + "sharp;": "\u266f", + "shchcy;": "\u0449", + "shcy;": "\u0448", + "shortmid;": "\u2223", + "shortparallel;": "\u2225", + "shy": "\xad", + "shy;": "\xad", + "sigma;": "\u03c3", + "sigmaf;": "\u03c2", + "sigmav;": "\u03c2", + "sim;": "\u223c", + "simdot;": "\u2a6a", + "sime;": "\u2243", + "simeq;": "\u2243", + "simg;": "\u2a9e", + "simgE;": "\u2aa0", + "siml;": "\u2a9d", + "simlE;": "\u2a9f", + "simne;": "\u2246", + "simplus;": "\u2a24", + "simrarr;": "\u2972", + "slarr;": "\u2190", + "smallsetminus;": "\u2216", + "smashp;": "\u2a33", + "smeparsl;": "\u29e4", + "smid;": "\u2223", + "smile;": "\u2323", + "smt;": "\u2aaa", + "smte;": "\u2aac", + "smtes;": "\u2aac\ufe00", + "softcy;": "\u044c", + "sol;": "/", + "solb;": "\u29c4", + "solbar;": "\u233f", + "sopf;": "\U0001d564", + "spades;": "\u2660", + "spadesuit;": "\u2660", + "spar;": "\u2225", + "sqcap;": "\u2293", + "sqcaps;": "\u2293\ufe00", + "sqcup;": "\u2294", + "sqcups;": "\u2294\ufe00", + "sqsub;": "\u228f", + "sqsube;": "\u2291", + "sqsubset;": "\u228f", + "sqsubseteq;": "\u2291", + "sqsup;": "\u2290", + "sqsupe;": "\u2292", + "sqsupset;": "\u2290", + "sqsupseteq;": "\u2292", + "squ;": "\u25a1", + "square;": "\u25a1", + "squarf;": "\u25aa", + "squf;": "\u25aa", + "srarr;": "\u2192", + "sscr;": "\U0001d4c8", + "ssetmn;": "\u2216", + "ssmile;": "\u2323", + "sstarf;": "\u22c6", + "star;": "\u2606", + "starf;": "\u2605", + "straightepsilon;": "\u03f5", + "straightphi;": "\u03d5", + "strns;": "\xaf", + "sub;": "\u2282", + "subE;": "\u2ac5", + "subdot;": "\u2abd", + "sube;": "\u2286", + "subedot;": "\u2ac3", + "submult;": "\u2ac1", + "subnE;": "\u2acb", + "subne;": "\u228a", + "subplus;": "\u2abf", + "subrarr;": "\u2979", + "subset;": "\u2282", + "subseteq;": "\u2286", + "subseteqq;": "\u2ac5", + "subsetneq;": "\u228a", + "subsetneqq;": "\u2acb", + "subsim;": "\u2ac7", + "subsub;": "\u2ad5", + "subsup;": "\u2ad3", + "succ;": "\u227b", + "succapprox;": "\u2ab8", + "succcurlyeq;": "\u227d", + "succeq;": "\u2ab0", + "succnapprox;": "\u2aba", + "succneqq;": "\u2ab6", + "succnsim;": "\u22e9", + "succsim;": "\u227f", + "sum;": "\u2211", + "sung;": "\u266a", + "sup1": "\xb9", + "sup1;": "\xb9", + "sup2": "\xb2", + "sup2;": "\xb2", + "sup3": "\xb3", + "sup3;": "\xb3", + "sup;": "\u2283", + "supE;": "\u2ac6", + "supdot;": "\u2abe", + "supdsub;": "\u2ad8", + "supe;": "\u2287", + "supedot;": "\u2ac4", + "suphsol;": "\u27c9", + "suphsub;": "\u2ad7", + "suplarr;": "\u297b", + "supmult;": "\u2ac2", + "supnE;": "\u2acc", + "supne;": "\u228b", + "supplus;": "\u2ac0", + "supset;": "\u2283", + "supseteq;": "\u2287", + "supseteqq;": "\u2ac6", + "supsetneq;": "\u228b", + "supsetneqq;": "\u2acc", + "supsim;": "\u2ac8", + "supsub;": "\u2ad4", + "supsup;": "\u2ad6", + "swArr;": "\u21d9", + "swarhk;": "\u2926", + "swarr;": "\u2199", + "swarrow;": "\u2199", + "swnwar;": "\u292a", + "szlig": "\xdf", + "szlig;": "\xdf", + "target;": "\u2316", + "tau;": "\u03c4", + "tbrk;": "\u23b4", + "tcaron;": "\u0165", + "tcedil;": "\u0163", + "tcy;": "\u0442", + "tdot;": "\u20db", + "telrec;": "\u2315", + "tfr;": "\U0001d531", + "there4;": "\u2234", + "therefore;": "\u2234", + "theta;": "\u03b8", + "thetasym;": "\u03d1", + "thetav;": "\u03d1", + "thickapprox;": "\u2248", + "thicksim;": "\u223c", + "thinsp;": "\u2009", + "thkap;": "\u2248", + "thksim;": 
"\u223c", + "thorn": "\xfe", + "thorn;": "\xfe", + "tilde;": "\u02dc", + "times": "\xd7", + "times;": "\xd7", + "timesb;": "\u22a0", + "timesbar;": "\u2a31", + "timesd;": "\u2a30", + "tint;": "\u222d", + "toea;": "\u2928", + "top;": "\u22a4", + "topbot;": "\u2336", + "topcir;": "\u2af1", + "topf;": "\U0001d565", + "topfork;": "\u2ada", + "tosa;": "\u2929", + "tprime;": "\u2034", + "trade;": "\u2122", + "triangle;": "\u25b5", + "triangledown;": "\u25bf", + "triangleleft;": "\u25c3", + "trianglelefteq;": "\u22b4", + "triangleq;": "\u225c", + "triangleright;": "\u25b9", + "trianglerighteq;": "\u22b5", + "tridot;": "\u25ec", + "trie;": "\u225c", + "triminus;": "\u2a3a", + "triplus;": "\u2a39", + "trisb;": "\u29cd", + "tritime;": "\u2a3b", + "trpezium;": "\u23e2", + "tscr;": "\U0001d4c9", + "tscy;": "\u0446", + "tshcy;": "\u045b", + "tstrok;": "\u0167", + "twixt;": "\u226c", + "twoheadleftarrow;": "\u219e", + "twoheadrightarrow;": "\u21a0", + "uArr;": "\u21d1", + "uHar;": "\u2963", + "uacute": "\xfa", + "uacute;": "\xfa", + "uarr;": "\u2191", + "ubrcy;": "\u045e", + "ubreve;": "\u016d", + "ucirc": "\xfb", + "ucirc;": "\xfb", + "ucy;": "\u0443", + "udarr;": "\u21c5", + "udblac;": "\u0171", + "udhar;": "\u296e", + "ufisht;": "\u297e", + "ufr;": "\U0001d532", + "ugrave": "\xf9", + "ugrave;": "\xf9", + "uharl;": "\u21bf", + "uharr;": "\u21be", + "uhblk;": "\u2580", + "ulcorn;": "\u231c", + "ulcorner;": "\u231c", + "ulcrop;": "\u230f", + "ultri;": "\u25f8", + "umacr;": "\u016b", + "uml": "\xa8", + "uml;": "\xa8", + "uogon;": "\u0173", + "uopf;": "\U0001d566", + "uparrow;": "\u2191", + "updownarrow;": "\u2195", + "upharpoonleft;": "\u21bf", + "upharpoonright;": "\u21be", + "uplus;": "\u228e", + "upsi;": "\u03c5", + "upsih;": "\u03d2", + "upsilon;": "\u03c5", + "upuparrows;": "\u21c8", + "urcorn;": "\u231d", + "urcorner;": "\u231d", + "urcrop;": "\u230e", + "uring;": "\u016f", + "urtri;": "\u25f9", + "uscr;": "\U0001d4ca", + "utdot;": "\u22f0", + "utilde;": "\u0169", + "utri;": "\u25b5", + "utrif;": "\u25b4", + "uuarr;": "\u21c8", + "uuml": "\xfc", + "uuml;": "\xfc", + "uwangle;": "\u29a7", + "vArr;": "\u21d5", + "vBar;": "\u2ae8", + "vBarv;": "\u2ae9", + "vDash;": "\u22a8", + "vangrt;": "\u299c", + "varepsilon;": "\u03f5", + "varkappa;": "\u03f0", + "varnothing;": "\u2205", + "varphi;": "\u03d5", + "varpi;": "\u03d6", + "varpropto;": "\u221d", + "varr;": "\u2195", + "varrho;": "\u03f1", + "varsigma;": "\u03c2", + "varsubsetneq;": "\u228a\ufe00", + "varsubsetneqq;": "\u2acb\ufe00", + "varsupsetneq;": "\u228b\ufe00", + "varsupsetneqq;": "\u2acc\ufe00", + "vartheta;": "\u03d1", + "vartriangleleft;": "\u22b2", + "vartriangleright;": "\u22b3", + "vcy;": "\u0432", + "vdash;": "\u22a2", + "vee;": "\u2228", + "veebar;": "\u22bb", + "veeeq;": "\u225a", + "vellip;": "\u22ee", + "verbar;": "|", + "vert;": "|", + "vfr;": "\U0001d533", + "vltri;": "\u22b2", + "vnsub;": "\u2282\u20d2", + "vnsup;": "\u2283\u20d2", + "vopf;": "\U0001d567", + "vprop;": "\u221d", + "vrtri;": "\u22b3", + "vscr;": "\U0001d4cb", + "vsubnE;": "\u2acb\ufe00", + "vsubne;": "\u228a\ufe00", + "vsupnE;": "\u2acc\ufe00", + "vsupne;": "\u228b\ufe00", + "vzigzag;": "\u299a", + "wcirc;": "\u0175", + "wedbar;": "\u2a5f", + "wedge;": "\u2227", + "wedgeq;": "\u2259", + "weierp;": "\u2118", + "wfr;": "\U0001d534", + "wopf;": "\U0001d568", + "wp;": "\u2118", + "wr;": "\u2240", + "wreath;": "\u2240", + "wscr;": "\U0001d4cc", + "xcap;": "\u22c2", + "xcirc;": "\u25ef", + "xcup;": "\u22c3", + "xdtri;": "\u25bd", + "xfr;": "\U0001d535", + "xhArr;": 
"\u27fa", + "xharr;": "\u27f7", + "xi;": "\u03be", + "xlArr;": "\u27f8", + "xlarr;": "\u27f5", + "xmap;": "\u27fc", + "xnis;": "\u22fb", + "xodot;": "\u2a00", + "xopf;": "\U0001d569", + "xoplus;": "\u2a01", + "xotime;": "\u2a02", + "xrArr;": "\u27f9", + "xrarr;": "\u27f6", + "xscr;": "\U0001d4cd", + "xsqcup;": "\u2a06", + "xuplus;": "\u2a04", + "xutri;": "\u25b3", + "xvee;": "\u22c1", + "xwedge;": "\u22c0", + "yacute": "\xfd", + "yacute;": "\xfd", + "yacy;": "\u044f", + "ycirc;": "\u0177", + "ycy;": "\u044b", + "yen": "\xa5", + "yen;": "\xa5", + "yfr;": "\U0001d536", + "yicy;": "\u0457", + "yopf;": "\U0001d56a", + "yscr;": "\U0001d4ce", + "yucy;": "\u044e", + "yuml": "\xff", + "yuml;": "\xff", + "zacute;": "\u017a", + "zcaron;": "\u017e", + "zcy;": "\u0437", + "zdot;": "\u017c", + "zeetrf;": "\u2128", + "zeta;": "\u03b6", + "zfr;": "\U0001d537", + "zhcy;": "\u0436", + "zigrarr;": "\u21dd", + "zopf;": "\U0001d56b", + "zscr;": "\U0001d4cf", + "zwj;": "\u200d", + "zwnj;": "\u200c", +} + +replacementCharacters = { + 0x0: "\uFFFD", + 0x0d: "\u000D", + 0x80: "\u20AC", + 0x81: "\u0081", + 0x82: "\u201A", + 0x83: "\u0192", + 0x84: "\u201E", + 0x85: "\u2026", + 0x86: "\u2020", + 0x87: "\u2021", + 0x88: "\u02C6", + 0x89: "\u2030", + 0x8A: "\u0160", + 0x8B: "\u2039", + 0x8C: "\u0152", + 0x8D: "\u008D", + 0x8E: "\u017D", + 0x8F: "\u008F", + 0x90: "\u0090", + 0x91: "\u2018", + 0x92: "\u2019", + 0x93: "\u201C", + 0x94: "\u201D", + 0x95: "\u2022", + 0x96: "\u2013", + 0x97: "\u2014", + 0x98: "\u02DC", + 0x99: "\u2122", + 0x9A: "\u0161", + 0x9B: "\u203A", + 0x9C: "\u0153", + 0x9D: "\u009D", + 0x9E: "\u017E", + 0x9F: "\u0178", +} + +tokenTypes = { + "Doctype": 0, + "Characters": 1, + "SpaceCharacters": 2, + "StartTag": 3, + "EndTag": 4, + "EmptyTag": 5, + "Comment": 6, + "ParseError": 7 +} + +tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], + tokenTypes["EmptyTag"]]) + + +prefixes = dict([(v, k) for k, v in namespaces.items()]) +prefixes["http://www.w3.org/1998/Math/MathML"] = "math" + + +class DataLossWarning(UserWarning): + pass + + +class ReparseException(Exception): + pass diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/__init__.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/alphabeticalattributes.py new file mode 100644 index 0000000..4795bae --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/alphabeticalattributes.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import, division, unicode_literals + +from . 
import base + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + + +class Filter(base.Filter): + def __iter__(self): + for token in base.Filter.__iter__(self): + if token["type"] in ("StartTag", "EmptyTag"): + attrs = OrderedDict() + for name, value in sorted(token["data"].items(), + key=lambda x: x[0]): + attrs[name] = value + token["data"] = attrs + yield token diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/base.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/base.py new file mode 100644 index 0000000..c7dbaed --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/base.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import, division, unicode_literals + + +class Filter(object): + def __init__(self, source): + self.source = source + + def __iter__(self): + return iter(self.source) + + def __getattr__(self, name): + return getattr(self.source, name) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/inject_meta_charset.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/inject_meta_charset.py new file mode 100644 index 0000000..2059ec8 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/inject_meta_charset.py @@ -0,0 +1,65 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import base + + +class Filter(base.Filter): + def __init__(self, source, encoding): + base.Filter.__init__(self, source) + self.encoding = encoding + + def __iter__(self): + state = "pre_head" + meta_found = (self.encoding is None) + pending = [] + + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag": + if token["name"].lower() == "head": + state = "in_head" + + elif type == "EmptyTag": + if token["name"].lower() == "meta": + # replace charset with actual encoding + has_http_equiv_content_type = False + for (namespace, name), value in token["data"].items(): + if namespace is not None: + continue + elif name.lower() == 'charset': + token["data"][(namespace, name)] = self.encoding + meta_found = True + break + elif name == 'http-equiv' and value.lower() == 'content-type': + has_http_equiv_content_type = True + else: + if has_http_equiv_content_type and (None, "content") in token["data"]: + token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding + meta_found = True + + elif token["name"].lower() == "head" and not meta_found: + # insert meta into empty head + yield {"type": "StartTag", "name": "head", + "data": token["data"]} + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + yield {"type": "EndTag", "name": "head"} + meta_found = True + continue + + elif type == "EndTag": + if token["name"].lower() == "head" and pending: + # insert meta into head (if necessary) and flush pending queue + yield pending.pop(0) + if not meta_found: + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + while pending: + yield pending.pop(0) + meta_found = True + state = "post_head" + + if state == "in_head": + pending.append(token) + else: + yield token diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/lint.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/lint.py new file mode 100644 index 0000000..3b892c8 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/lint.py @@ -0,0 +1,81 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import text_type + 
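Each filter in this package follows the wrapping pattern defined in base.Filter above: it takes any iterable of token dicts as its source and re-yields (possibly rewritten) tokens from __iter__, so filters compose by simple nesting. A usage sketch with the standard html5lib entry points — inside this vendored copy the imports would resolve through pip._vendor.html5lib instead:

    import html5lib
    from html5lib import getTreeWalker
    from html5lib.filters import alphabeticalattributes, whitespace

    tree = html5lib.parse('<p   id="b" class="a">some   text</p>')
    tokens = getTreeWalker("etree")(tree)
    tokens = alphabeticalattributes.Filter(tokens)  # sorts each start tag's attributes
    tokens = whitespace.Filter(tokens)              # collapses runs of whitespace
    result = list(tokens)                           # iterating drives the whole chain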
+from . import base +from ..constants import namespaces, voidElements + +from ..constants import spaceCharacters +spaceCharacters = "".join(spaceCharacters) + + +class Filter(base.Filter): + def __init__(self, source, require_matching_tags=True): + super(Filter, self).__init__(source) + self.require_matching_tags = require_matching_tags + + def __iter__(self): + open_elements = [] + for token in base.Filter.__iter__(self): + type = token["type"] + if type in ("StartTag", "EmptyTag"): + namespace = token["namespace"] + name = token["name"] + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + assert isinstance(token["data"], dict) + if (not namespace or namespace == namespaces["html"]) and name in voidElements: + assert type == "EmptyTag" + else: + assert type == "StartTag" + if type == "StartTag" and self.require_matching_tags: + open_elements.append((namespace, name)) + for (namespace, name), value in token["data"].items(): + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + assert isinstance(value, text_type) + + elif type == "EndTag": + namespace = token["namespace"] + name = token["name"] + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + if (not namespace or namespace == namespaces["html"]) and name in voidElements: + assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name} + elif self.require_matching_tags: + start = open_elements.pop() + assert start == (namespace, name) + + elif type == "Comment": + data = token["data"] + assert isinstance(data, text_type) + + elif type in ("Characters", "SpaceCharacters"): + data = token["data"] + assert isinstance(data, text_type) + assert data != "" + if type == "SpaceCharacters": + assert data.strip(spaceCharacters) == "" + + elif type == "Doctype": + name = token["name"] + assert name is None or isinstance(name, text_type) + assert token["publicId"] is None or isinstance(name, text_type) + assert token["systemId"] is None or isinstance(name, text_type) + + elif type == "Entity": + assert isinstance(token["name"], text_type) + + elif type == "SerializerError": + assert isinstance(token["data"], text_type) + + else: + assert False, "Unknown token type: %(type)s" % {"type": type} + + yield token diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/optionaltags.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/optionaltags.py new file mode 100644 index 0000000..f6edb73 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/optionaltags.py @@ -0,0 +1,206 @@ +from __future__ import absolute_import, division, unicode_literals + +from . 
import base + + +class Filter(base.Filter): + def slider(self): + previous1 = previous2 = None + for token in self.source: + if previous1 is not None: + yield previous2, previous1, token + previous2 = previous1 + previous1 = token + if previous1 is not None: + yield previous2, previous1, None + + def __iter__(self): + for previous, token, next in self.slider(): + type = token["type"] + if type == "StartTag": + if (token["data"] or + not self.is_optional_start(token["name"], previous, next)): + yield token + elif type == "EndTag": + if not self.is_optional_end(token["name"], next): + yield token + else: + yield token + + def is_optional_start(self, tagname, previous, next): + type = next and next["type"] or None + if tagname in 'html': + # An html element's start tag may be omitted if the first thing + # inside the html element is not a space character or a comment. + return type not in ("Comment", "SpaceCharacters") + elif tagname == 'head': + # A head element's start tag may be omitted if the first thing + # inside the head element is an element. + # XXX: we also omit the start tag if the head element is empty + if type in ("StartTag", "EmptyTag"): + return True + elif type == "EndTag": + return next["name"] == "head" + elif tagname == 'body': + # A body element's start tag may be omitted if the first thing + # inside the body element is not a space character or a comment, + # except if the first thing inside the body element is a script + # or style element and the node immediately preceding the body + # element is a head element whose end tag has been omitted. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we do not look at the preceding event, so we never omit + # the body element's start tag if it's followed by a script or + # a style element. + return next["name"] not in ('script', 'style') + else: + return True + elif tagname == 'colgroup': + # A colgroup element's start tag may be omitted if the first thing + # inside the colgroup element is a col element, and if the element + # is not immediately preceded by another colgroup element whose + # end tag has been omitted. + if type in ("StartTag", "EmptyTag"): + # XXX: we do not look at the preceding event, so instead we never + # omit the colgroup element's end tag when it is immediately + # followed by another colgroup element. See is_optional_end. + return next["name"] == "col" + else: + return False + elif tagname == 'tbody': + # A tbody element's start tag may be omitted if the first thing + # inside the tbody element is a tr element, and if the element is + # not immediately preceded by a tbody, thead, or tfoot element + # whose end tag has been omitted. + if type == "StartTag": + # omit the thead and tfoot elements' end tag when they are + # immediately followed by a tbody element. See is_optional_end. + if previous and previous['type'] == 'EndTag' and \ + previous['name'] in ('tbody', 'thead', 'tfoot'): + return False + return next["name"] == 'tr' + else: + return False + return False + + def is_optional_end(self, tagname, next): + type = next and next["type"] or None + if tagname in ('html', 'head', 'body'): + # An html element's end tag may be omitted if the html element + # is not immediately followed by a space character or a comment. 
+ return type not in ("Comment", "SpaceCharacters") + elif tagname in ('li', 'optgroup', 'tr'): + # A li element's end tag may be omitted if the li element is + # immediately followed by another li element or if there is + # no more content in the parent element. + # An optgroup element's end tag may be omitted if the optgroup + # element is immediately followed by another optgroup element, + # or if there is no more content in the parent element. + # A tr element's end tag may be omitted if the tr element is + # immediately followed by another tr element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] == tagname + else: + return type == "EndTag" or type is None + elif tagname in ('dt', 'dd'): + # A dt element's end tag may be omitted if the dt element is + # immediately followed by another dt element or a dd element. + # A dd element's end tag may be omitted if the dd element is + # immediately followed by another dd element or a dt element, + # or if there is no more content in the parent element. + if type == "StartTag": + return next["name"] in ('dt', 'dd') + elif tagname == 'dd': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'p': + # A p element's end tag may be omitted if the p element is + # immediately followed by an address, article, aside, + # blockquote, datagrid, dialog, dir, div, dl, fieldset, + # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu, + # nav, ol, p, pre, section, table, or ul, element, or if + # there is no more content in the parent element. + if type in ("StartTag", "EmptyTag"): + return next["name"] in ('address', 'article', 'aside', + 'blockquote', 'datagrid', 'dialog', + 'dir', 'div', 'dl', 'fieldset', 'footer', + 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', + 'header', 'hr', 'menu', 'nav', 'ol', + 'p', 'pre', 'section', 'table', 'ul') + else: + return type == "EndTag" or type is None + elif tagname == 'option': + # An option element's end tag may be omitted if the option + # element is immediately followed by another option element, + # or if it is immediately followed by an optgroup + # element, or if there is no more content in the parent + # element. + if type == "StartTag": + return next["name"] in ('option', 'optgroup') + else: + return type == "EndTag" or type is None + elif tagname in ('rt', 'rp'): + # An rt element's end tag may be omitted if the rt element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + # An rp element's end tag may be omitted if the rp element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('rt', 'rp') + else: + return type == "EndTag" or type is None + elif tagname == 'colgroup': + # A colgroup element's end tag may be omitted if the colgroup + # element is not immediately followed by a space character or + # a comment. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we also look for an immediately following colgroup + # element. See is_optional_start. + return next["name"] != 'colgroup' + else: + return True + elif tagname in ('thead', 'tbody'): + # A thead element's end tag may be omitted if the thead element + # is immediately followed by a tbody or tfoot element. 
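None of these omission rules run at parse time; they apply when a tree is re-serialized with the serializer's omit_optional_tags option, which is what drives this filter. A small sketch using the public html5lib API (module paths differ under pip._vendor):

    import html5lib

    frag = html5lib.parseFragment(
        "<table><thead><tr><th>h</th></tr></thead>"
        "<tbody><tr><td>1</td></tr></tbody></table>")
    # With omit_optional_tags=True the filter drops end tags such as </thead>
    # here (a tbody start tag follows), per the rules documented in this file.
    html5lib.serialize(frag, omit_optional_tags=True)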
+ # A tbody element's end tag may be omitted if the tbody element + # is immediately followed by a tbody or tfoot element, or if + # there is no more content in the parent element. + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] in ['tbody', 'tfoot'] + elif tagname == 'tbody': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'tfoot': + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] == 'tbody' + else: + return type == "EndTag" or type is None + elif tagname in ('td', 'th'): + # A td element's end tag may be omitted if the td element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + # A th element's end tag may be omitted if the th element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('td', 'th') + else: + return type == "EndTag" or type is None + return False diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/sanitizer.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/sanitizer.py new file mode 100644 index 0000000..026748d --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/sanitizer.py @@ -0,0 +1,865 @@ +from __future__ import absolute_import, division, unicode_literals + +import re +from xml.sax.saxutils import escape, unescape + +from pip._vendor.six.moves import urllib_parse as urlparse + +from . 
import base +from ..constants import namespaces, prefixes + +__all__ = ["Filter"] + + +allowed_elements = frozenset(( + (namespaces['html'], 'a'), + (namespaces['html'], 'abbr'), + (namespaces['html'], 'acronym'), + (namespaces['html'], 'address'), + (namespaces['html'], 'area'), + (namespaces['html'], 'article'), + (namespaces['html'], 'aside'), + (namespaces['html'], 'audio'), + (namespaces['html'], 'b'), + (namespaces['html'], 'big'), + (namespaces['html'], 'blockquote'), + (namespaces['html'], 'br'), + (namespaces['html'], 'button'), + (namespaces['html'], 'canvas'), + (namespaces['html'], 'caption'), + (namespaces['html'], 'center'), + (namespaces['html'], 'cite'), + (namespaces['html'], 'code'), + (namespaces['html'], 'col'), + (namespaces['html'], 'colgroup'), + (namespaces['html'], 'command'), + (namespaces['html'], 'datagrid'), + (namespaces['html'], 'datalist'), + (namespaces['html'], 'dd'), + (namespaces['html'], 'del'), + (namespaces['html'], 'details'), + (namespaces['html'], 'dfn'), + (namespaces['html'], 'dialog'), + (namespaces['html'], 'dir'), + (namespaces['html'], 'div'), + (namespaces['html'], 'dl'), + (namespaces['html'], 'dt'), + (namespaces['html'], 'em'), + (namespaces['html'], 'event-source'), + (namespaces['html'], 'fieldset'), + (namespaces['html'], 'figcaption'), + (namespaces['html'], 'figure'), + (namespaces['html'], 'footer'), + (namespaces['html'], 'font'), + (namespaces['html'], 'form'), + (namespaces['html'], 'header'), + (namespaces['html'], 'h1'), + (namespaces['html'], 'h2'), + (namespaces['html'], 'h3'), + (namespaces['html'], 'h4'), + (namespaces['html'], 'h5'), + (namespaces['html'], 'h6'), + (namespaces['html'], 'hr'), + (namespaces['html'], 'i'), + (namespaces['html'], 'img'), + (namespaces['html'], 'input'), + (namespaces['html'], 'ins'), + (namespaces['html'], 'keygen'), + (namespaces['html'], 'kbd'), + (namespaces['html'], 'label'), + (namespaces['html'], 'legend'), + (namespaces['html'], 'li'), + (namespaces['html'], 'm'), + (namespaces['html'], 'map'), + (namespaces['html'], 'menu'), + (namespaces['html'], 'meter'), + (namespaces['html'], 'multicol'), + (namespaces['html'], 'nav'), + (namespaces['html'], 'nextid'), + (namespaces['html'], 'ol'), + (namespaces['html'], 'output'), + (namespaces['html'], 'optgroup'), + (namespaces['html'], 'option'), + (namespaces['html'], 'p'), + (namespaces['html'], 'pre'), + (namespaces['html'], 'progress'), + (namespaces['html'], 'q'), + (namespaces['html'], 's'), + (namespaces['html'], 'samp'), + (namespaces['html'], 'section'), + (namespaces['html'], 'select'), + (namespaces['html'], 'small'), + (namespaces['html'], 'sound'), + (namespaces['html'], 'source'), + (namespaces['html'], 'spacer'), + (namespaces['html'], 'span'), + (namespaces['html'], 'strike'), + (namespaces['html'], 'strong'), + (namespaces['html'], 'sub'), + (namespaces['html'], 'sup'), + (namespaces['html'], 'table'), + (namespaces['html'], 'tbody'), + (namespaces['html'], 'td'), + (namespaces['html'], 'textarea'), + (namespaces['html'], 'time'), + (namespaces['html'], 'tfoot'), + (namespaces['html'], 'th'), + (namespaces['html'], 'thead'), + (namespaces['html'], 'tr'), + (namespaces['html'], 'tt'), + (namespaces['html'], 'u'), + (namespaces['html'], 'ul'), + (namespaces['html'], 'var'), + (namespaces['html'], 'video'), + (namespaces['mathml'], 'maction'), + (namespaces['mathml'], 'math'), + (namespaces['mathml'], 'merror'), + (namespaces['mathml'], 'mfrac'), + (namespaces['mathml'], 'mi'), + (namespaces['mathml'], 'mmultiscripts'), + 
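The whitelists in this module (the element list continues below, followed by the attribute list) are frozensets of (namespace, local name) pairs, so the sanitize_token/allowed_token checks further down are plain set-membership tests, for example:

    (namespaces['html'], 'blockquote') in allowed_elements  # True: element is kept
    (namespaces['html'], 'script') in allowed_elements      # False: escaped to text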
(namespaces['mathml'], 'mn'), + (namespaces['mathml'], 'mo'), + (namespaces['mathml'], 'mover'), + (namespaces['mathml'], 'mpadded'), + (namespaces['mathml'], 'mphantom'), + (namespaces['mathml'], 'mprescripts'), + (namespaces['mathml'], 'mroot'), + (namespaces['mathml'], 'mrow'), + (namespaces['mathml'], 'mspace'), + (namespaces['mathml'], 'msqrt'), + (namespaces['mathml'], 'mstyle'), + (namespaces['mathml'], 'msub'), + (namespaces['mathml'], 'msubsup'), + (namespaces['mathml'], 'msup'), + (namespaces['mathml'], 'mtable'), + (namespaces['mathml'], 'mtd'), + (namespaces['mathml'], 'mtext'), + (namespaces['mathml'], 'mtr'), + (namespaces['mathml'], 'munder'), + (namespaces['mathml'], 'munderover'), + (namespaces['mathml'], 'none'), + (namespaces['svg'], 'a'), + (namespaces['svg'], 'animate'), + (namespaces['svg'], 'animateColor'), + (namespaces['svg'], 'animateMotion'), + (namespaces['svg'], 'animateTransform'), + (namespaces['svg'], 'clipPath'), + (namespaces['svg'], 'circle'), + (namespaces['svg'], 'defs'), + (namespaces['svg'], 'desc'), + (namespaces['svg'], 'ellipse'), + (namespaces['svg'], 'font-face'), + (namespaces['svg'], 'font-face-name'), + (namespaces['svg'], 'font-face-src'), + (namespaces['svg'], 'g'), + (namespaces['svg'], 'glyph'), + (namespaces['svg'], 'hkern'), + (namespaces['svg'], 'linearGradient'), + (namespaces['svg'], 'line'), + (namespaces['svg'], 'marker'), + (namespaces['svg'], 'metadata'), + (namespaces['svg'], 'missing-glyph'), + (namespaces['svg'], 'mpath'), + (namespaces['svg'], 'path'), + (namespaces['svg'], 'polygon'), + (namespaces['svg'], 'polyline'), + (namespaces['svg'], 'radialGradient'), + (namespaces['svg'], 'rect'), + (namespaces['svg'], 'set'), + (namespaces['svg'], 'stop'), + (namespaces['svg'], 'svg'), + (namespaces['svg'], 'switch'), + (namespaces['svg'], 'text'), + (namespaces['svg'], 'title'), + (namespaces['svg'], 'tspan'), + (namespaces['svg'], 'use'), +)) + +allowed_attributes = frozenset(( + # HTML attributes + (None, 'abbr'), + (None, 'accept'), + (None, 'accept-charset'), + (None, 'accesskey'), + (None, 'action'), + (None, 'align'), + (None, 'alt'), + (None, 'autocomplete'), + (None, 'autofocus'), + (None, 'axis'), + (None, 'background'), + (None, 'balance'), + (None, 'bgcolor'), + (None, 'bgproperties'), + (None, 'border'), + (None, 'bordercolor'), + (None, 'bordercolordark'), + (None, 'bordercolorlight'), + (None, 'bottompadding'), + (None, 'cellpadding'), + (None, 'cellspacing'), + (None, 'ch'), + (None, 'challenge'), + (None, 'char'), + (None, 'charoff'), + (None, 'choff'), + (None, 'charset'), + (None, 'checked'), + (None, 'cite'), + (None, 'class'), + (None, 'clear'), + (None, 'color'), + (None, 'cols'), + (None, 'colspan'), + (None, 'compact'), + (None, 'contenteditable'), + (None, 'controls'), + (None, 'coords'), + (None, 'data'), + (None, 'datafld'), + (None, 'datapagesize'), + (None, 'datasrc'), + (None, 'datetime'), + (None, 'default'), + (None, 'delay'), + (None, 'dir'), + (None, 'disabled'), + (None, 'draggable'), + (None, 'dynsrc'), + (None, 'enctype'), + (None, 'end'), + (None, 'face'), + (None, 'for'), + (None, 'form'), + (None, 'frame'), + (None, 'galleryimg'), + (None, 'gutter'), + (None, 'headers'), + (None, 'height'), + (None, 'hidefocus'), + (None, 'hidden'), + (None, 'high'), + (None, 'href'), + (None, 'hreflang'), + (None, 'hspace'), + (None, 'icon'), + (None, 'id'), + (None, 'inputmode'), + (None, 'ismap'), + (None, 'keytype'), + (None, 'label'), + (None, 'leftspacing'), + (None, 'lang'), + (None, 'list'), + (None, 
'longdesc'), + (None, 'loop'), + (None, 'loopcount'), + (None, 'loopend'), + (None, 'loopstart'), + (None, 'low'), + (None, 'lowsrc'), + (None, 'max'), + (None, 'maxlength'), + (None, 'media'), + (None, 'method'), + (None, 'min'), + (None, 'multiple'), + (None, 'name'), + (None, 'nohref'), + (None, 'noshade'), + (None, 'nowrap'), + (None, 'open'), + (None, 'optimum'), + (None, 'pattern'), + (None, 'ping'), + (None, 'point-size'), + (None, 'poster'), + (None, 'pqg'), + (None, 'preload'), + (None, 'prompt'), + (None, 'radiogroup'), + (None, 'readonly'), + (None, 'rel'), + (None, 'repeat-max'), + (None, 'repeat-min'), + (None, 'replace'), + (None, 'required'), + (None, 'rev'), + (None, 'rightspacing'), + (None, 'rows'), + (None, 'rowspan'), + (None, 'rules'), + (None, 'scope'), + (None, 'selected'), + (None, 'shape'), + (None, 'size'), + (None, 'span'), + (None, 'src'), + (None, 'start'), + (None, 'step'), + (None, 'style'), + (None, 'summary'), + (None, 'suppress'), + (None, 'tabindex'), + (None, 'target'), + (None, 'template'), + (None, 'title'), + (None, 'toppadding'), + (None, 'type'), + (None, 'unselectable'), + (None, 'usemap'), + (None, 'urn'), + (None, 'valign'), + (None, 'value'), + (None, 'variable'), + (None, 'volume'), + (None, 'vspace'), + (None, 'vrml'), + (None, 'width'), + (None, 'wrap'), + (namespaces['xml'], 'lang'), + # MathML attributes + (None, 'actiontype'), + (None, 'align'), + (None, 'columnalign'), + (None, 'columnalign'), + (None, 'columnalign'), + (None, 'columnlines'), + (None, 'columnspacing'), + (None, 'columnspan'), + (None, 'depth'), + (None, 'display'), + (None, 'displaystyle'), + (None, 'equalcolumns'), + (None, 'equalrows'), + (None, 'fence'), + (None, 'fontstyle'), + (None, 'fontweight'), + (None, 'frame'), + (None, 'height'), + (None, 'linethickness'), + (None, 'lspace'), + (None, 'mathbackground'), + (None, 'mathcolor'), + (None, 'mathvariant'), + (None, 'mathvariant'), + (None, 'maxsize'), + (None, 'minsize'), + (None, 'other'), + (None, 'rowalign'), + (None, 'rowalign'), + (None, 'rowalign'), + (None, 'rowlines'), + (None, 'rowspacing'), + (None, 'rowspan'), + (None, 'rspace'), + (None, 'scriptlevel'), + (None, 'selection'), + (None, 'separator'), + (None, 'stretchy'), + (None, 'width'), + (None, 'width'), + (namespaces['xlink'], 'href'), + (namespaces['xlink'], 'show'), + (namespaces['xlink'], 'type'), + # SVG attributes + (None, 'accent-height'), + (None, 'accumulate'), + (None, 'additive'), + (None, 'alphabetic'), + (None, 'arabic-form'), + (None, 'ascent'), + (None, 'attributeName'), + (None, 'attributeType'), + (None, 'baseProfile'), + (None, 'bbox'), + (None, 'begin'), + (None, 'by'), + (None, 'calcMode'), + (None, 'cap-height'), + (None, 'class'), + (None, 'clip-path'), + (None, 'color'), + (None, 'color-rendering'), + (None, 'content'), + (None, 'cx'), + (None, 'cy'), + (None, 'd'), + (None, 'dx'), + (None, 'dy'), + (None, 'descent'), + (None, 'display'), + (None, 'dur'), + (None, 'end'), + (None, 'fill'), + (None, 'fill-opacity'), + (None, 'fill-rule'), + (None, 'font-family'), + (None, 'font-size'), + (None, 'font-stretch'), + (None, 'font-style'), + (None, 'font-variant'), + (None, 'font-weight'), + (None, 'from'), + (None, 'fx'), + (None, 'fy'), + (None, 'g1'), + (None, 'g2'), + (None, 'glyph-name'), + (None, 'gradientUnits'), + (None, 'hanging'), + (None, 'height'), + (None, 'horiz-adv-x'), + (None, 'horiz-origin-x'), + (None, 'id'), + (None, 'ideographic'), + (None, 'k'), + (None, 'keyPoints'), + (None, 'keySplines'), + (None, 'keyTimes'), 
+ (None, 'lang'), + (None, 'marker-end'), + (None, 'marker-mid'), + (None, 'marker-start'), + (None, 'markerHeight'), + (None, 'markerUnits'), + (None, 'markerWidth'), + (None, 'mathematical'), + (None, 'max'), + (None, 'min'), + (None, 'name'), + (None, 'offset'), + (None, 'opacity'), + (None, 'orient'), + (None, 'origin'), + (None, 'overline-position'), + (None, 'overline-thickness'), + (None, 'panose-1'), + (None, 'path'), + (None, 'pathLength'), + (None, 'points'), + (None, 'preserveAspectRatio'), + (None, 'r'), + (None, 'refX'), + (None, 'refY'), + (None, 'repeatCount'), + (None, 'repeatDur'), + (None, 'requiredExtensions'), + (None, 'requiredFeatures'), + (None, 'restart'), + (None, 'rotate'), + (None, 'rx'), + (None, 'ry'), + (None, 'slope'), + (None, 'stemh'), + (None, 'stemv'), + (None, 'stop-color'), + (None, 'stop-opacity'), + (None, 'strikethrough-position'), + (None, 'strikethrough-thickness'), + (None, 'stroke'), + (None, 'stroke-dasharray'), + (None, 'stroke-dashoffset'), + (None, 'stroke-linecap'), + (None, 'stroke-linejoin'), + (None, 'stroke-miterlimit'), + (None, 'stroke-opacity'), + (None, 'stroke-width'), + (None, 'systemLanguage'), + (None, 'target'), + (None, 'text-anchor'), + (None, 'to'), + (None, 'transform'), + (None, 'type'), + (None, 'u1'), + (None, 'u2'), + (None, 'underline-position'), + (None, 'underline-thickness'), + (None, 'unicode'), + (None, 'unicode-range'), + (None, 'units-per-em'), + (None, 'values'), + (None, 'version'), + (None, 'viewBox'), + (None, 'visibility'), + (None, 'width'), + (None, 'widths'), + (None, 'x'), + (None, 'x-height'), + (None, 'x1'), + (None, 'x2'), + (namespaces['xlink'], 'actuate'), + (namespaces['xlink'], 'arcrole'), + (namespaces['xlink'], 'href'), + (namespaces['xlink'], 'role'), + (namespaces['xlink'], 'show'), + (namespaces['xlink'], 'title'), + (namespaces['xlink'], 'type'), + (namespaces['xml'], 'base'), + (namespaces['xml'], 'lang'), + (namespaces['xml'], 'space'), + (None, 'y'), + (None, 'y1'), + (None, 'y2'), + (None, 'zoomAndPan'), +)) + +attr_val_is_uri = frozenset(( + (None, 'href'), + (None, 'src'), + (None, 'cite'), + (None, 'action'), + (None, 'longdesc'), + (None, 'poster'), + (None, 'background'), + (None, 'datasrc'), + (None, 'dynsrc'), + (None, 'lowsrc'), + (None, 'ping'), + (namespaces['xlink'], 'href'), + (namespaces['xml'], 'base'), +)) + +svg_attr_val_allows_ref = frozenset(( + (None, 'clip-path'), + (None, 'color-profile'), + (None, 'cursor'), + (None, 'fill'), + (None, 'filter'), + (None, 'marker'), + (None, 'marker-start'), + (None, 'marker-mid'), + (None, 'marker-end'), + (None, 'mask'), + (None, 'stroke'), +)) + +svg_allow_local_href = frozenset(( + (None, 'altGlyph'), + (None, 'animate'), + (None, 'animateColor'), + (None, 'animateMotion'), + (None, 'animateTransform'), + (None, 'cursor'), + (None, 'feImage'), + (None, 'filter'), + (None, 'linearGradient'), + (None, 'pattern'), + (None, 'radialGradient'), + (None, 'textpath'), + (None, 'tref'), + (None, 'set'), + (None, 'use') +)) + +allowed_css_properties = frozenset(( + 'azimuth', + 'background-color', + 'border-bottom-color', + 'border-collapse', + 'border-color', + 'border-left-color', + 'border-right-color', + 'border-top-color', + 'clear', + 'color', + 'cursor', + 'direction', + 'display', + 'elevation', + 'float', + 'font', + 'font-family', + 'font-size', + 'font-style', + 'font-variant', + 'font-weight', + 'height', + 'letter-spacing', + 'line-height', + 'overflow', + 'pause', + 'pause-after', + 'pause-before', + 'pitch', + 'pitch-range', 
+ 'richness', + 'speak', + 'speak-header', + 'speak-numeral', + 'speak-punctuation', + 'speech-rate', + 'stress', + 'text-align', + 'text-decoration', + 'text-indent', + 'unicode-bidi', + 'vertical-align', + 'voice-family', + 'volume', + 'white-space', + 'width', +)) + +allowed_css_keywords = frozenset(( + 'auto', + 'aqua', + 'black', + 'block', + 'blue', + 'bold', + 'both', + 'bottom', + 'brown', + 'center', + 'collapse', + 'dashed', + 'dotted', + 'fuchsia', + 'gray', + 'green', + '!important', + 'italic', + 'left', + 'lime', + 'maroon', + 'medium', + 'none', + 'navy', + 'normal', + 'nowrap', + 'olive', + 'pointer', + 'purple', + 'red', + 'right', + 'solid', + 'silver', + 'teal', + 'top', + 'transparent', + 'underline', + 'white', + 'yellow', +)) + +allowed_svg_properties = frozenset(( + 'fill', + 'fill-opacity', + 'fill-rule', + 'stroke', + 'stroke-width', + 'stroke-linecap', + 'stroke-linejoin', + 'stroke-opacity', +)) + +allowed_protocols = frozenset(( + 'ed2k', + 'ftp', + 'http', + 'https', + 'irc', + 'mailto', + 'news', + 'gopher', + 'nntp', + 'telnet', + 'webcal', + 'xmpp', + 'callto', + 'feed', + 'urn', + 'aim', + 'rsync', + 'tag', + 'ssh', + 'sftp', + 'rtsp', + 'afs', + 'data', +)) + +allowed_content_types = frozenset(( + 'image/png', + 'image/jpeg', + 'image/gif', + 'image/webp', + 'image/bmp', + 'text/plain', +)) + + +data_content_type = re.compile(r''' + ^ + # Match a content type / + (?P[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+) + # Match any character set and encoding + (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?) + |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?) + # Assume the rest is data + ,.* + $ + ''', + re.VERBOSE) + + +class Filter(base.Filter): + """ sanitization of XHTML+MathML+SVG and of inline style attributes.""" + def __init__(self, + source, + allowed_elements=allowed_elements, + allowed_attributes=allowed_attributes, + allowed_css_properties=allowed_css_properties, + allowed_css_keywords=allowed_css_keywords, + allowed_svg_properties=allowed_svg_properties, + allowed_protocols=allowed_protocols, + allowed_content_types=allowed_content_types, + attr_val_is_uri=attr_val_is_uri, + svg_attr_val_allows_ref=svg_attr_val_allows_ref, + svg_allow_local_href=svg_allow_local_href): + super(Filter, self).__init__(source) + self.allowed_elements = allowed_elements + self.allowed_attributes = allowed_attributes + self.allowed_css_properties = allowed_css_properties + self.allowed_css_keywords = allowed_css_keywords + self.allowed_svg_properties = allowed_svg_properties + self.allowed_protocols = allowed_protocols + self.allowed_content_types = allowed_content_types + self.attr_val_is_uri = attr_val_is_uri + self.svg_attr_val_allows_ref = svg_attr_val_allows_ref + self.svg_allow_local_href = svg_allow_local_href + + def __iter__(self): + for token in base.Filter.__iter__(self): + token = self.sanitize_token(token) + if token: + yield token + + # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and + # stripping out all # attributes not in ALLOWED_ATTRIBUTES. Style + # attributes are parsed, and a restricted set, # specified by + # ALLOWED_CSS_PROPERTIES and ALLOWED_CSS_KEYWORDS, are allowed through. + # attributes in ATTR_VAL_IS_URI are scanned, and only URI schemes specified + # in ALLOWED_PROTOCOLS are allowed. 
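The comment above summarizes the behaviour; here is a short usage sketch for this sanitizer, following html5lib's documented filter pattern (under pip the imports resolve via pip._vendor.html5lib):

    import html5lib
    from html5lib import getTreeWalker
    from html5lib.filters import sanitizer
    from html5lib.serializer import HTMLSerializer

    dirty = '<p onclick="evil()">hi <a href="javascript:x()">link</a></p>'
    tokens = sanitizer.Filter(getTreeWalker("etree")(html5lib.parse(dirty)))
    # onclick is not in allowed_attributes and javascript: is not an allowed
    # protocol, so both are dropped; the p and a elements themselves are kept.
    HTMLSerializer().render(tokens)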
+ # + # sanitize_html('') + # => <script> do_nasty_stuff() </script> + # sanitize_html('Click here for $100') + # => Click here for $100 + def sanitize_token(self, token): + + # accommodate filters which use token_type differently + token_type = token["type"] + if token_type in ("StartTag", "EndTag", "EmptyTag"): + name = token["name"] + namespace = token["namespace"] + if ((namespace, name) in self.allowed_elements or + (namespace is None and + (namespaces["html"], name) in self.allowed_elements)): + return self.allowed_token(token) + else: + return self.disallowed_token(token) + elif token_type == "Comment": + pass + else: + return token + + def allowed_token(self, token): + if "data" in token: + attrs = token["data"] + attr_names = set(attrs.keys()) + + # Remove forbidden attributes + for to_remove in (attr_names - self.allowed_attributes): + del token["data"][to_remove] + attr_names.remove(to_remove) + + # Remove attributes with disallowed URL values + for attr in (attr_names & self.attr_val_is_uri): + assert attr in attrs + # I don't have a clue where this regexp comes from or why it matches those + # characters, nor why we call unescape. I just know it's always been here. + # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all + # this will do is remove *more* than it otherwise would. + val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\s]+", '', + unescape(attrs[attr])).lower() + # remove replacement characters from unescaped characters + val_unescaped = val_unescaped.replace("\ufffd", "") + try: + uri = urlparse.urlparse(val_unescaped) + except ValueError: + uri = None + del attrs[attr] + if uri and uri.scheme: + if uri.scheme not in self.allowed_protocols: + del attrs[attr] + if uri.scheme == 'data': + m = data_content_type.match(uri.path) + if not m: + del attrs[attr] + elif m.group('content_type') not in self.allowed_content_types: + del attrs[attr] + + for attr in self.svg_attr_val_allows_ref: + if attr in attrs: + attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', + ' ', + unescape(attrs[attr])) + if (token["name"] in self.svg_allow_local_href and + (namespaces['xlink'], 'href') in attrs and re.search('^\s*[^#\s].*', + attrs[(namespaces['xlink'], 'href')])): + del attrs[(namespaces['xlink'], 'href')] + if (None, 'style') in attrs: + attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) + token["data"] = attrs + return token + + def disallowed_token(self, token): + token_type = token["type"] + if token_type == "EndTag": + token["data"] = "" % token["name"] + elif token["data"]: + assert token_type in ("StartTag", "EmptyTag") + attrs = [] + for (ns, name), v in token["data"].items(): + attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) + token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) + else: + token["data"] = "<%s>" % token["name"] + if token.get("selfClosing"): + token["data"] = token["data"][:-1] + "/>" + + token["type"] = "Characters" + + del token["name"] + return token + + def sanitize_css(self, style): + # disallow urls + style = re.compile('url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) + + # gauntlet + if not re.match("""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): + return '' + if not re.match("^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): + return '' + + clean = [] + for prop, value in re.findall("([-\w]+)\s*:\s*([^:;]*)", style): + if not value: + continue + if prop.lower() in self.allowed_css_properties: + clean.append(prop + ': ' + value + ';') + 
elif prop.split('-')[0].lower() in ['background', 'border', 'margin', + 'padding']: + for keyword in value.split(): + if keyword not in self.allowed_css_keywords and \ + not re.match("^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa + break + else: + clean.append(prop + ': ' + value + ';') + elif prop.lower() in self.allowed_svg_properties: + clean.append(prop + ': ' + value + ';') + + return ' '.join(clean) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/whitespace.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/whitespace.py new file mode 100644 index 0000000..8921052 --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/filters/whitespace.py @@ -0,0 +1,38 @@ +from __future__ import absolute_import, division, unicode_literals + +import re + +from . import base +from ..constants import rcdataElements, spaceCharacters +spaceCharacters = "".join(spaceCharacters) + +SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) + + +class Filter(base.Filter): + + spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) + + def __iter__(self): + preserve = 0 + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag" \ + and (preserve or token["name"] in self.spacePreserveElements): + preserve += 1 + + elif type == "EndTag" and preserve: + preserve -= 1 + + elif not preserve and type == "SpaceCharacters" and token["data"]: + # Test on token["data"] above to not introduce spaces where there were not + token["data"] = " " + + elif not preserve and type == "Characters": + token["data"] = collapse_spaces(token["data"]) + + yield token + + +def collapse_spaces(text): + return SPACES_REGEX.sub(' ', text) diff --git a/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/html5parser.py b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/html5parser.py new file mode 100644 index 0000000..f7043cb --- /dev/null +++ b/RBXLegacyDiscordBot/lib/pip/_vendor/html5lib/html5parser.py @@ -0,0 +1,2733 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import with_metaclass, viewkeys, PY3 + +import types + +try: + from collections import OrderedDict +except ImportError: + from pip._vendor.ordereddict import OrderedDict + +from . import _inputstream +from . import _tokenizer + +from . import treebuilders +from .treebuilders.base import Marker + +from . 
import _utils +from .constants import ( + spaceCharacters, asciiUpper2Lower, + specialElements, headingElements, cdataElements, rcdataElements, + tokenTypes, tagTokenTypes, + namespaces, + htmlIntegrationPointElements, mathmlTextIntegrationPointElements, + adjustForeignAttributes as adjustForeignAttributesMap, + adjustMathMLAttributes, adjustSVGAttributes, + E, + ReparseException +) + + +def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse a string or file-like object into a tree""" + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parse(doc, **kwargs) + + +def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parseFragment(doc, container=container, **kwargs) + + +def method_decorator_metaclass(function): + class Decorated(type): + def __new__(meta, classname, bases, classDict): + for attributeName, attribute in classDict.items(): + if isinstance(attribute, types.FunctionType): + attribute = function(attribute) + + classDict[attributeName] = attribute + return type.__new__(meta, classname, bases, classDict) + return Decorated + + +class HTMLParser(object): + """HTML parser. Generates a tree structure from a stream of (possibly + malformed) HTML""" + + def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): + """ + strict - raise an exception when a parse error is encountered + + tree - a treebuilder class controlling the type of tree that will be + returned. Built in treebuilders can be accessed through + html5lib.treebuilders.getTreeBuilder(treeType) + """ + + # Raise an exception on the first error encountered + self.strict = strict + + if tree is None: + tree = treebuilders.getTreeBuilder("etree") + self.tree = tree(namespaceHTMLElements) + self.errors = [] + + self.phases = dict([(name, cls(self, self.tree)) for name, cls in + getPhases(debug).items()]) + + def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): + + self.innerHTMLMode = innerHTML + self.container = container + self.scripting = scripting + self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) + self.reset() + + try: + self.mainLoop() + except ReparseException: + self.reset() + self.mainLoop() + + def reset(self): + self.tree.reset() + self.firstStartTag = False + self.errors = [] + self.log = [] # only used with debug mode + # "quirks" / "limited quirks" / "no quirks" + self.compatMode = "no quirks" + + if self.innerHTMLMode: + self.innerHTML = self.container.lower() + + if self.innerHTML in cdataElements: + self.tokenizer.state = self.tokenizer.rcdataState + elif self.innerHTML in rcdataElements: + self.tokenizer.state = self.tokenizer.rawtextState + elif self.innerHTML == 'plaintext': + self.tokenizer.state = self.tokenizer.plaintextState + else: + # state already is data state + # self.tokenizer.state = self.tokenizer.dataState + pass + self.phase = self.phases["beforeHtml"] + self.phase.insertHtmlElement() + self.resetInsertionMode() + else: + self.innerHTML = False # pylint:disable=redefined-variable-type + self.phase = self.phases["initial"] + + self.lastPhase = None + + self.beforeRCDataPhase = None + + self.framesetOK = True + + @property + def documentEncoding(self): + """The name of the character encoding + that was used to decode the input stream, + or 
:obj:`None` if that is not determined yet. + + """ + if not hasattr(self, 'tokenizer'): + return None + return self.tokenizer.stream.charEncoding[0].name + + def isHTMLIntegrationPoint(self, element): + if (element.name == "annotation-xml" and + element.namespace == namespaces["mathml"]): + return ("encoding" in element.attributes and + element.attributes["encoding"].translate( + asciiUpper2Lower) in + ("text/html", "application/xhtml+xml")) + else: + return (element.namespace, element.name) in htmlIntegrationPointElements + + def isMathMLTextIntegrationPoint(self, element): + return (element.namespace, element.name) in mathmlTextIntegrationPointElements + + def mainLoop(self): + CharactersToken = tokenTypes["Characters"] + SpaceCharactersToken = tokenTypes["SpaceCharacters"] + StartTagToken = tokenTypes["StartTag"] + EndTagToken = tokenTypes["EndTag"] + CommentToken = tokenTypes["Comment"] + DoctypeToken = tokenTypes["Doctype"] + ParseErrorToken = tokenTypes["ParseError"] + + for token in self.normalizedTokens(): + prev_token = None + new_token = token + while new_token is not None: + prev_token = new_token + currentNode = self.tree.openElements[-1] if self.tree.openElements else None + currentNodeNamespace = currentNode.namespace if currentNode else None + currentNodeName = currentNode.name if currentNode else None + + type = new_token["type"] + + if type == ParseErrorToken: + self.parseError(new_token["data"], new_token.get("datavars", {})) + new_token = None + else: + if (len(self.tree.openElements) == 0 or + currentNodeNamespace == self.tree.defaultNamespace or + (self.isMathMLTextIntegrationPoint(currentNode) and + ((type == StartTagToken and + token["name"] not in frozenset(["mglyph", "malignmark"])) or + type in (CharactersToken, SpaceCharactersToken))) or + (currentNodeNamespace == namespaces["mathml"] and + currentNodeName == "annotation-xml" and + type == StartTagToken and + token["name"] == "svg") or + (self.isHTMLIntegrationPoint(currentNode) and + type in (StartTagToken, CharactersToken, SpaceCharactersToken))): + phase = self.phase + else: + phase = self.phases["inForeignContent"] + + if type == CharactersToken: + new_token = phase.processCharacters(new_token) + elif type == SpaceCharactersToken: + new_token = phase.processSpaceCharacters(new_token) + elif type == StartTagToken: + new_token = phase.processStartTag(new_token) + elif type == EndTagToken: + new_token = phase.processEndTag(new_token) + elif type == CommentToken: + new_token = phase.processComment(new_token) + elif type == DoctypeToken: + new_token = phase.processDoctype(new_token) + + if (type == StartTagToken and prev_token["selfClosing"] and + not prev_token["selfClosingAcknowledged"]): + self.parseError("non-void-element-with-trailing-solidus", + {"name": prev_token["name"]}) + + # When the loop finishes it's EOF + reprocess = True + phases = [] + while reprocess: + phases.append(self.phase) + reprocess = self.phase.processEOF() + if reprocess: + assert self.phase not in phases + + def normalizedTokens(self): + for token in self.tokenizer: + yield self.normalizeToken(token) + + def parse(self, stream, *args, **kwargs): + """Parse a HTML document into a well-formed tree + + stream - a filelike object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. 
If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + scripting - treat noscript elements as if javascript was turned on + """ + self._parse(stream, False, None, *args, **kwargs) + return self.tree.getDocument() + + def parseFragment(self, stream, *args, **kwargs): + """Parse a HTML fragment into a well-formed tree fragment + + container - name of the element we're setting the innerHTML property + if set to None, default to 'div' + + stream - a filelike object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + scripting - treat noscript elements as if javascript was turned on + """ + self._parse(stream, True, *args, **kwargs) + return self.tree.getFragment() + + def parseError(self, errorcode="XXX-undefined-error", datavars=None): + # XXX The idea is to make errorcode mandatory. + if datavars is None: + datavars = {} + self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) + if self.strict: + raise ParseError(E[errorcode] % datavars) + + def normalizeToken(self, token): + """ HTML5 specific normalizations to the token stream """ + + if token["type"] == tokenTypes["StartTag"]: + raw = token["data"] + token["data"] = OrderedDict(raw) + if len(raw) > len(token["data"]): + # we had some duplicated attribute, fix so first wins + token["data"].update(raw[::-1]) + + return token + + def adjustMathMLAttributes(self, token): + adjust_attributes(token, adjustMathMLAttributes) + + def adjustSVGAttributes(self, token): + adjust_attributes(token, adjustSVGAttributes) + + def adjustForeignAttributes(self, token): + adjust_attributes(token, adjustForeignAttributesMap) + + def reparseTokenNormal(self, token): + # pylint:disable=unused-argument + self.parser.phase() + + def resetInsertionMode(self): + # The name of this method is mostly historical. (It's also used in the + # specification.) 
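Since parse(), parseFragment(), and documentEncoding above are the public surface most callers touch, a short usage sketch may help; it targets the standalone html5lib package (equivalent to this vendored copy), and the sample bytes and the printed encoding are assumptions rather than anything taken from the patch.

    # Illustrative only; mirrors the entry points defined above.
    import html5lib

    # Whole-document parse from bytes: the tokenizer starts with a tentative
    # encoding and switches when it meets <meta charset=...> (see startTagMeta below).
    parser = html5lib.HTMLParser()          # defaults to the "etree" treebuilder
    document = parser.parse(b'<meta charset="windows-1252"><p>caf\xe9</p>')
    print(parser.documentEncoding)          # expected: 'windows-1252'
    print(parser.errors)                    # [(position, errorcode, datavars), ...]

    # Fragment parse, innerHTML-style; container defaults to "div".
    fragment = html5lib.parseFragment('<td>cell</td>', container='tr')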
+ last = False + newModes = { + "select": "inSelect", + "td": "inCell", + "th": "inCell", + "tr": "inRow", + "tbody": "inTableBody", + "thead": "inTableBody", + "tfoot": "inTableBody", + "caption": "inCaption", + "colgroup": "inColumnGroup", + "table": "inTable", + "head": "inBody", + "body": "inBody", + "frameset": "inFrameset", + "html": "beforeHead" + } + for node in self.tree.openElements[::-1]: + nodeName = node.name + new_phase = None + if node == self.tree.openElements[0]: + assert self.innerHTML + last = True + nodeName = self.innerHTML + # Check for conditions that should only happen in the innerHTML + # case + if nodeName in ("select", "colgroup", "head", "html"): + assert self.innerHTML + + if not last and node.namespace != self.tree.defaultNamespace: + continue + + if nodeName in newModes: + new_phase = self.phases[newModes[nodeName]] + break + elif last: + new_phase = self.phases["inBody"] + break + + self.phase = new_phase + + def parseRCDataRawtext(self, token, contentType): + """Generic RCDATA/RAWTEXT Parsing algorithm + contentType - RCDATA or RAWTEXT + """ + assert contentType in ("RAWTEXT", "RCDATA") + + self.tree.insertElement(token) + + if contentType == "RAWTEXT": + self.tokenizer.state = self.tokenizer.rawtextState + else: + self.tokenizer.state = self.tokenizer.rcdataState + + self.originalPhase = self.phase + + self.phase = self.phases["text"] + + +@_utils.memoize +def getPhases(debug): + def log(function): + """Logger that records which phase processes each token""" + type_names = dict((value, key) for key, value in + tokenTypes.items()) + + def wrapped(self, *args, **kwargs): + if function.__name__.startswith("process") and len(args) > 0: + token = args[0] + try: + info = {"type": type_names[token['type']]} + except: + raise + if token['type'] in tagTokenTypes: + info["name"] = token['name'] + + self.parser.log.append((self.parser.tokenizer.state.__name__, + self.parser.phase.__class__.__name__, + self.__class__.__name__, + function.__name__, + info)) + return function(self, *args, **kwargs) + else: + return function(self, *args, **kwargs) + return wrapped + + def getMetaclass(use_metaclass, metaclass_func): + if use_metaclass: + return method_decorator_metaclass(metaclass_func) + else: + return type + + # pylint:disable=unused-argument + class Phase(with_metaclass(getMetaclass(debug, log))): + """Base class for helper object that implements each phase of processing + """ + + def __init__(self, parser, tree): + self.parser = parser + self.tree = tree + + def processEOF(self): + raise NotImplementedError + + def processComment(self, token): + # For most phases the following is correct. Where it's not it will be + # overridden. + self.tree.insertComment(token, self.tree.openElements[-1]) + + def processDoctype(self, token): + self.parser.parseError("unexpected-doctype") + + def processCharacters(self, token): + self.tree.insertText(token["data"]) + + def processSpaceCharacters(self, token): + self.tree.insertText(token["data"]) + + def processStartTag(self, token): + return self.startTagHandler[token["name"]](token) + + def startTagHtml(self, token): + if not self.parser.firstStartTag and token["name"] == "html": + self.parser.parseError("non-html-root") + # XXX Need a check here to see if the first start tag token emitted is + # this token... If it's not, invoke self.parser.parseError(). 
+ for attr, value in token["data"].items(): + if attr not in self.tree.openElements[0].attributes: + self.tree.openElements[0].attributes[attr] = value + self.parser.firstStartTag = False + + def processEndTag(self, token): + return self.endTagHandler[token["name"]](token) + + class InitialPhase(Phase): + def processSpaceCharacters(self, token): + pass + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + correct = token["correct"] + + if (name != "html" or publicId is not None or + systemId is not None and systemId != "about:legacy-compat"): + self.parser.parseError("unknown-doctype") + + if publicId is None: + publicId = "" + + self.tree.insertDoctype(token) + + if publicId != "": + publicId = publicId.translate(asciiUpper2Lower) + + if (not correct or token["name"] != "html" or + publicId.startswith( + ("+//silmaril//dtd html pro v0r11 19970101//", + "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", + "-//as//dtd html 3.0 aswedit + extensions//", + "-//ietf//dtd html 2.0 level 1//", + "-//ietf//dtd html 2.0 level 2//", + "-//ietf//dtd html 2.0 strict level 1//", + "-//ietf//dtd html 2.0 strict level 2//", + "-//ietf//dtd html 2.0 strict//", + "-//ietf//dtd html 2.0//", + "-//ietf//dtd html 2.1e//", + "-//ietf//dtd html 3.0//", + "-//ietf//dtd html 3.2 final//", + "-//ietf//dtd html 3.2//", + "-//ietf//dtd html 3//", + "-//ietf//dtd html level 0//", + "-//ietf//dtd html level 1//", + "-//ietf//dtd html level 2//", + "-//ietf//dtd html level 3//", + "-//ietf//dtd html strict level 0//", + "-//ietf//dtd html strict level 1//", + "-//ietf//dtd html strict level 2//", + "-//ietf//dtd html strict level 3//", + "-//ietf//dtd html strict//", + "-//ietf//dtd html//", + "-//metrius//dtd metrius presentational//", + "-//microsoft//dtd internet explorer 2.0 html strict//", + "-//microsoft//dtd internet explorer 2.0 html//", + "-//microsoft//dtd internet explorer 2.0 tables//", + "-//microsoft//dtd internet explorer 3.0 html strict//", + "-//microsoft//dtd internet explorer 3.0 html//", + "-//microsoft//dtd internet explorer 3.0 tables//", + "-//netscape comm. corp.//dtd html//", + "-//netscape comm. 
corp.//dtd strict html//", + "-//o'reilly and associates//dtd html 2.0//", + "-//o'reilly and associates//dtd html extended 1.0//", + "-//o'reilly and associates//dtd html extended relaxed 1.0//", + "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", + "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", + "-//spyglass//dtd html 2.0 extended//", + "-//sq//dtd html 2.0 hotmetal + extensions//", + "-//sun microsystems corp.//dtd hotjava html//", + "-//sun microsystems corp.//dtd hotjava strict html//", + "-//w3c//dtd html 3 1995-03-24//", + "-//w3c//dtd html 3.2 draft//", + "-//w3c//dtd html 3.2 final//", + "-//w3c//dtd html 3.2//", + "-//w3c//dtd html 3.2s draft//", + "-//w3c//dtd html 4.0 frameset//", + "-//w3c//dtd html 4.0 transitional//", + "-//w3c//dtd html experimental 19960712//", + "-//w3c//dtd html experimental 970421//", + "-//w3c//dtd w3 html//", + "-//w3o//dtd w3 html 3.0//", + "-//webtechs//dtd mozilla html 2.0//", + "-//webtechs//dtd mozilla html//")) or + publicId in ("-//w3o//dtd w3 html strict 3.0//en//", + "-/w3c/dtd html 4.0 transitional/en", + "html") or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is None or + systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): + self.parser.compatMode = "quirks" + elif (publicId.startswith( + ("-//w3c//dtd xhtml 1.0 frameset//", + "-//w3c//dtd xhtml 1.0 transitional//")) or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is not None): + self.parser.compatMode = "limited quirks" + + self.parser.phase = self.parser.phases["beforeHtml"] + + def anythingElse(self): + self.parser.compatMode = "quirks" + self.parser.phase = self.parser.phases["beforeHtml"] + + def processCharacters(self, token): + self.parser.parseError("expected-doctype-but-got-chars") + self.anythingElse() + return token + + def processStartTag(self, token): + self.parser.parseError("expected-doctype-but-got-start-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEndTag(self, token): + self.parser.parseError("expected-doctype-but-got-end-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEOF(self): + self.parser.parseError("expected-doctype-but-got-eof") + self.anythingElse() + return True + + class BeforeHtmlPhase(Phase): + # helper methods + def insertHtmlElement(self): + self.tree.insertRoot(impliedTagToken("html", "StartTag")) + self.parser.phase = self.parser.phases["beforeHead"] + + # other + def processEOF(self): + self.insertHtmlElement() + return True + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.insertHtmlElement() + return token + + def processStartTag(self, token): + if token["name"] == "html": + self.parser.firstStartTag = True + self.insertHtmlElement() + return token + + def processEndTag(self, token): + if token["name"] not in ("head", "body", "html", "br"): + self.parser.parseError("unexpected-end-tag-before-html", + {"name": token["name"]}) + else: + self.insertHtmlElement() + return token + + class BeforeHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("head", self.startTagHead) + ]) 
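The long publicId prefix tables above implement the spec's quirks-mode sniffing; the result is recorded on the parser's compatMode attribute rather than returned. A small sketch of observing it (illustrative, via the standalone html5lib package; the doctype strings come from the lists above, everything else is assumed):

    # Illustrative only; compatMode is set by InitialPhase.processDoctype above.
    import html5lib

    p = html5lib.HTMLParser()
    p.parse('<!DOCTYPE html><p>modern</p>')
    print(p.compatMode)    # "no quirks"

    p = html5lib.HTMLParser()
    p.parse('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"><p>old</p>')
    print(p.compatMode)    # "quirks": 4.01 Transitional public id with no system id

    p = html5lib.HTMLParser()
    p.parse('<p>no doctype at all</p>')
    print(p.compatMode)    # "quirks": missing doctype falls through to anythingElse()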
+ self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + (("head", "body", "html", "br"), self.endTagImplyHead) + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.startTagHead(impliedTagToken("head", "StartTag")) + return True + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.tree.insertElement(token) + self.tree.headPointer = self.tree.openElements[-1] + self.parser.phase = self.parser.phases["inHead"] + + def startTagOther(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagImplyHead(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagOther(self, token): + self.parser.parseError("end-tag-after-implied-root", + {"name": token["name"]}) + + class InHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("title", self.startTagTitle), + (("noframes", "style"), self.startTagNoFramesStyle), + ("noscript", self.startTagNoscript), + ("script", self.startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + self.startTagBaseLinkCommand), + ("meta", self.startTagMeta), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("head", self.endTagHead), + (("br", "html", "body"), self.endTagHtmlBodyBr) + ]) + self.endTagHandler.default = self.endTagOther + + # the real thing + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.parser.parseError("two-heads-are-not-better-than-one") + + def startTagBaseLinkCommand(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagMeta(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + attributes = token["data"] + if self.parser.tokenizer.stream.charEncoding[1] == "tentative": + if "charset" in attributes: + self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) + elif ("content" in attributes and + "http-equiv" in attributes and + attributes["http-equiv"].lower() == "content-type"): + # Encoding it as UTF-8 here is a hack, as really we should pass + # the abstract Unicode string, and just use the + # ContentAttrParser on that, but using UTF-8 allows all chars + # to be encoded and as a ASCII-superset works. 
+ data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) + parser = _inputstream.ContentAttrParser(data) + codec = parser.parse() + self.parser.tokenizer.stream.changeEncoding(codec) + + def startTagTitle(self, token): + self.parser.parseRCDataRawtext(token, "RCDATA") + + def startTagNoFramesStyle(self, token): + # Need to decide whether to implement the scripting-disabled case + self.parser.parseRCDataRawtext(token, "RAWTEXT") + + def startTagNoscript(self, token): + if self.parser.scripting: + self.parser.parseRCDataRawtext(token, "RAWTEXT") + else: + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inHeadNoscript"] + + def startTagScript(self, token): + self.tree.insertElement(token) + self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState + self.parser.originalPhase = self.parser.phase + self.parser.phase = self.parser.phases["text"] + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHead(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "head", "Expected head got %s" % node.name + self.parser.phase = self.parser.phases["afterHead"] + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.endTagHead(impliedTagToken("head")) + + class InHeadNoscriptPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), + (("head", "noscript"), self.startTagHeadNoscript), + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("noscript", self.endTagNoscript), + ("br", self.endTagBr), + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.parser.parseError("eof-in-head-noscript") + self.anythingElse() + return True + + def processComment(self, token): + return self.parser.phases["inHead"].processComment(token) + + def processCharacters(self, token): + self.parser.parseError("char-in-head-noscript") + self.anythingElse() + return token + + def processSpaceCharacters(self, token): + return self.parser.phases["inHead"].processSpaceCharacters(token) + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBaseLinkCommand(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagHeadNoscript(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagNoscript(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "noscript", "Expected noscript got %s" % node.name + self.parser.phase = self.parser.phases["inHead"] + + def endTagBr(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + # Caller must raise parse error first! 
+ self.endTagNoscript(impliedTagToken("noscript")) + + class AfterHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("body", self.startTagBody), + ("frameset", self.startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + self.startTagFromHead), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), + self.endTagHtmlBodyBr)]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBody(self, token): + self.parser.framesetOK = False + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inBody"] + + def startTagFrameset(self, token): + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inFrameset"] + + def startTagFromHead(self, token): + self.parser.parseError("unexpected-start-tag-out-of-my-head", + {"name": token["name"]}) + self.tree.openElements.append(self.tree.headPointer) + self.parser.phases["inHead"].processStartTag(token) + for node in self.tree.openElements[::-1]: + if node.name == "head": + self.tree.openElements.remove(node) + break + + def startTagHead(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.tree.insertElement(impliedTagToken("body", "StartTag")) + self.parser.phase = self.parser.phases["inBody"] + self.parser.framesetOK = True + + class InBodyPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody + # the really-really-really-very crazy mode + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + # Set this to the default handler + self.processSpaceCharacters = self.processSpaceCharactersNonPre + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("base", "basefont", "bgsound", "command", "link", "meta", + "script", "style", "title"), + self.startTagProcessInHead), + ("body", self.startTagBody), + ("frameset", self.startTagFrameset), + (("address", "article", "aside", "blockquote", "center", "details", + "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", + "section", "summary", "ul"), + self.startTagCloseP), + (headingElements, self.startTagHeading), + (("pre", "listing"), self.startTagPreListing), + ("form", self.startTagForm), + (("li", "dd", "dt"), self.startTagListItem), + ("plaintext", self.startTagPlaintext), + ("a", self.startTagA), + (("b", "big", "code", "em", "font", "i", "s", "small", "strike", + "strong", "tt", "u"), self.startTagFormatting), + ("nobr", self.startTagNobr), + ("button", self.startTagButton), + (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), + ("xmp", self.startTagXmp), + ("table", self.startTagTable), + (("area", "br", "embed", "img", "keygen", "wbr"), + 
self.startTagVoidFormatting), + (("param", "source", "track"), self.startTagParamSource), + ("input", self.startTagInput), + ("hr", self.startTagHr), + ("image", self.startTagImage), + ("isindex", self.startTagIsIndex), + ("textarea", self.startTagTextarea), + ("iframe", self.startTagIFrame), + ("noscript", self.startTagNoscript), + (("noembed", "noframes"), self.startTagRawtext), + ("select", self.startTagSelect), + (("rp", "rt"), self.startTagRpRt), + (("option", "optgroup"), self.startTagOpt), + (("math"), self.startTagMath), + (("svg"), self.startTagSvg), + (("caption", "col", "colgroup", "frame", "head", + "tbody", "td", "tfoot", "th", "thead", + "tr"), self.startTagMisplaced) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("body", self.endTagBody), + ("html", self.endTagHtml), + (("address", "article", "aside", "blockquote", "button", "center", + "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", + "section", "summary", "ul"), self.endTagBlock), + ("form", self.endTagForm), + ("p", self.endTagP), + (("dd", "dt", "li"), self.endTagListItem), + (headingElements, self.endTagHeading), + (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", + "strike", "strong", "tt", "u"), self.endTagFormatting), + (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), + ("br", self.endTagBr), + ]) + self.endTagHandler.default = self.endTagOther + + def isMatchingFormattingElement(self, node1, node2): + return (node1.name == node2.name and + node1.namespace == node2.namespace and + node1.attributes == node2.attributes) + + # helper + def addFormattingElement(self, token): + self.tree.insertElement(token) + element = self.tree.openElements[-1] + + matchingElements = [] + for node in self.tree.activeFormattingElements[::-1]: + if node is Marker: + break + elif self.isMatchingFormattingElement(node, element): + matchingElements.append(node) + + assert len(matchingElements) <= 3 + if len(matchingElements) == 3: + self.tree.activeFormattingElements.remove(matchingElements[-1]) + self.tree.activeFormattingElements.append(element) + + # the real deal + def processEOF(self): + allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td", + "tfoot", "th", "thead", "tr", "body", + "html")) + for node in self.tree.openElements[::-1]: + if node.name not in allowed_elements: + self.parser.parseError("expected-closing-tag-but-got-eof") + break + # Stop parsing + + def processSpaceCharactersDropNewline(self, token): + # Sometimes (start of
<pre>, <listing>, and